http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-interface/metron-alerts/e2e/mock-data/alerts_ui_e2e_index.template
----------------------------------------------------------------------
diff --git 
a/metron-interface/metron-alerts/e2e/mock-data/alerts_ui_e2e_index.template 
b/metron-interface/metron-alerts/e2e/mock-data/alerts_ui_e2e_index.template
index 7737592..caf593c 100644
--- a/metron-interface/metron-alerts/e2e/mock-data/alerts_ui_e2e_index.template
+++ b/metron-interface/metron-alerts/e2e/mock-data/alerts_ui_e2e_index.template
@@ -2,9 +2,6 @@
   "template": "alerts_ui_e2e_index",
   "mappings": {
     "alerts_ui_e2e_doc": {
-      "_timestamp": {
-        "enabled": true
-      },
       "dynamic_templates": [
       {
         "geo_location_point": {
@@ -20,8 +17,7 @@
           "match": "enrichments:geo:*:country",
           "match_mapping_type": "*",
           "mapping": {
-            "type": "string",
-            "index": "not_analyzed"
+            "type": "keyword"
           }
         }
       },
@@ -30,8 +26,7 @@
           "match": "enrichments:geo:*:city",
           "match_mapping_type": "*",
           "mapping": {
-            "type": "string",
-            "index": "not_analyzed"
+            "type": "keyword"
           }
         }
       },
@@ -40,8 +35,7 @@
           "match": "enrichments:geo:*:locID",
           "match_mapping_type": "*",
           "mapping": {
-            "type": "string",
-            "index": "not_analyzed"
+            "type": "keyword"
           }
         }
       },
@@ -50,8 +44,7 @@
           "match": "enrichments:geo:*:dmaCode",
           "match_mapping_type": "*",
           "mapping": {
-            "type": "string",
-            "index": "not_analyzed"
+            "type": "keyword"
           }
         }
       },
@@ -60,8 +53,7 @@
           "match": "enrichments:geo:*:postalCode",
           "match_mapping_type": "*",
           "mapping": {
-            "type": "string",
-            "index": "not_analyzed"
+            "type": "keyword"
           }
         }
       },
@@ -98,25 +90,27 @@
           "mapping": {
             "type": "float"
           },
-          "match": "threat.triage.rules:*:score",
+          "match": "threat:triage:*score",
           "match_mapping_type": "*"
         }
       },
       {
         "threat_triage_reason": {
           "mapping": {
-            "type": "string"
+            "type": "text",
+            "fielddata": "true"
           },
-          "match": "threat.triage.rules:*:reason",
+          "match": "threat:triage:rules:*:reason",
           "match_mapping_type": "*"
         }
       },
       {
         "threat_triage_name": {
           "mapping": {
-            "type": "string"
+            "type": "text",
+            "fielddata": "true"
           },
-          "match": "threat.triage.rules:*:name",
+          "match": "threat:triage:rules:*:name",
           "match_mapping_type": "*"
         }
       }
@@ -131,14 +125,13 @@
          * Be careful when modifying this file to not unintentionally affect 
other logs.
          * For instance, the "version" field exists in the HTTP, SSL, and SSH 
logs.  If you
          * were to only consider the SSH log, you would set the type to 
integer, but because
-         * in the SSL and HTTP logs version is a string, we must set the type 
to string.
+         * in the SSL and HTTP logs version is a string, we must set the type 
to keyword.
          */
         /*
          * Metron-specific fields
          */
         "source:type": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * Widely-used Bro fields (potentially renamed during Metron ingest)
@@ -148,8 +141,7 @@
           "format": "epoch_millis"
         },
         "uid": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "alert": {
           "type": "nested"
@@ -171,6 +163,12 @@
          * 
https://www.bro.org/sphinx/scripts/base/protocols/http/main.bro.html#type-HTTP::Info
          *
          * Notable Fields
+         *   Field:     method
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     uri
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
          *   Field:     password
          *   Notes:     Field exists in the HTTP and FTP logs
          *
@@ -178,46 +176,54 @@
          *   Notes:     Field exists in the HTTP and FTP logs
          *
          *   Field:     trans_depth
-         *   Notes:     Field exists in the HTTP and SMTP logs
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
          *
          *   Field:     user_agent
-         *   Notes:     Field exists in the HTTP and SMTP logs
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
          *
          *   Field:     version
          *   Notes:     Field exists in the HTTP, SSL, and SSH logs
          *
          *   Field:     host
-         *   Notes:     Field exists in the HTTP and Software logs
+         *   Notes:     Field exists in the HTTP, KnownCerts, and Software logs
          *
          *   Field:     username
          *   Notes:     Field exists in the HTTP and RADIUS logs
+         *
+         *   Field:     status_code
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     status_msg
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     request_body_len
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     response_body_len
+         *   Notes:     Field exists in the HTTP and SIP logs
          */
         "trans_depth": {
           "type": "integer"
         },
         "method": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "host": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "uri": {
-          "type": "string",
-          "index": "not_analyzed",
+          "type": "keyword",
           "ignore_above": 8191
         },
         "referrer": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "user_agent": {
-          "type": "string"
+          "type": "text",
+          "fielddata": "true"
         },
         "request_body_len": {
           "type": "long"
@@ -229,19 +235,46 @@
           "type": "integer"
         },
         "status_msg": {
+          "type": "keyword"
+        },
+        "info_code": {
+          "type": "integer"
+        },
+        "info_msg": {
           "type": "string",
           "index": "not_analyzed"
         },
-        "username": {
+        "tags": {
           "type": "string",
           "index": "not_analyzed"
         },
+        "username": {
+          "type": "keyword"
+        },
         "password": {
+          "type": "keyword"
+        },
+        "proxied": {
           "type": "string",
           "index": "not_analyzed"
         },
-        "capture_password": {
-          "type": "boolean"
+        "orig_fuids": {
+          "type": "string"
+        },
+        "orig_filenames": {
+          "type": "string"
+        },
+        "orig_mime_types": {
+          "type": "string"
+        },
+        "resp_fuids": {
+          "type": "string"
+        },
+        "resp_filenames": {
+          "type": "string"
+        },
+        "resp_mime_types": {
+          "type": "string"
         },
         /*
          * DNS log support
@@ -253,38 +286,41 @@
          *
          *   Field:     trans_id
          *   Notes:     Field exists in the DNS and DHCP logs
+         *
+         *   Field:     rtt
+         *   Notes:     This field uses the "interval" type, which may need to be handled differently.
+         *              
https://www.bro.org/sphinx-git/script-reference/types.html#type-interval
          */
         "proto": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "trans_id": {
           "type": "long"
         },
-        "query": {
+        "rtt": {
           "type": "string",
           "index": "not_analyzed"
         },
+        "query": {
+          "type": "keyword"
+        },
         "qclass": {
           "type": "integer"
         },
         "qclass_name": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "qtype": {
           "type": "integer"
         },
         "qtype_name": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "rcode": {
           "type": "integer"
         },
         "rcode_name": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "AA": {
           "type": "boolean"
@@ -302,6 +338,10 @@
           "type": "integer"
         },
         "answers": {
+          "type": "text",
+          "fielddata": "true"
+        },
+        "TTLs": {
           "type": "string"
         },
         "rejected": {
@@ -322,58 +362,46 @@
          *   Notes:     Field exists in the Conn and Files logs
          */
         "service": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "duration": {
           "type": "float"
         },
         "orig_bytes": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "resp_bytes": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "conn_state": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "local_orig": {
           "type": "boolean"
         },
         "local_resp": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "missed_bytes": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "history": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "orig_pkts": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "orig_ip_bytes": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "resp_pkts": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "resp_ip_bytes": {
-          "type": "long",
-          "index": "not_analyzed"
+          "type": "long"
         },
         "tunnel_parents": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * DPD log support
@@ -384,12 +412,10 @@
          *   Notes:     Field exists in the DNS, Conn, DPD, and Notice logs
          */
         "analyzer": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "failure_reason": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * FTP log support
@@ -406,22 +432,22 @@
          *   Notes:     Field exists in the FTP and Files logs
          *
          *   Field:     fuid
-         *   Notes:     Field exists in the FTP and Notice logs
+         *   Notes:     Field exists in the FTP, Files, and Notice logs
          */
         "user": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "command": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "arg": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "mime_type": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "file_size": {
@@ -431,8 +457,7 @@
           "type": "integer"
         },
         "reply_msg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "data_channel:passive": {
           "type": "boolean"
@@ -447,15 +472,15 @@
           "type": "integer"
         },
         "cwd": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "passive": {
           "type": "boolean"
         },
         "fuid": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * Files log support
@@ -470,25 +495,34 @@
          *
          *   Field:     mime_type
          *   Notes:     Field exists in the FTP and Files logs
+         *
+         *   Field:     duration
+         *   Notes:     Field exists in the Conn and Files logs
+         *
+         *   Field:     local_orig
+         *   Notes:     Field exists in the Conn and Files logs
+         *
+         *   Field:     fuid
+         *   Notes:     Field exists in the FTP, Files, and Notice logs
          */
         "conn_uids": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "source": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "depth": {
           "type": "integer"
         },
         "analyzers": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "filename": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "is_orig": {
           "type": "boolean"
@@ -509,113 +543,143 @@
           "type": "boolean"
         },
         "parent_fuid": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "md5": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "sha1": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "sha256": {
+          "type": "keyword"
+        },
+        "extracted": {
           "type": "string",
           "index": "not_analyzed"
         },
+        "extracted_cutoff": {
+          "type": "boolean"
+        },
+        "extracted_size": {
+          "type": "long"
+        },
         /*
          * Known::CertInfo log support
          * 
https://www.bro.org/sphinx/scripts/policy/protocols/ssl/known-certs.bro.html#type-Known::CertsInfo
          *
          * Notable Fields
+         *   Field:     host
+         *   Notes:     Field exists in the HTTP, KnownCerts, and Software logs
+         *
          *   Field:     subject
-         *   Notes:     Field exists in the Known::CertInfo and SMTP logs
+         *   Notes:     Field exists in the KnownCerts, SMTP, SIP, and SSL logs
          */
         "port_num": {
           "type": "integer"
         },
         "subject": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "issuer_subject": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "serial": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * SMTP log support
          * 
https://www.bro.org/sphinx/scripts/base/protocols/smtp/main.bro.html#type-SMTP::Info
          *
          * Notable Fields
+         *   Field:     trans_depth
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
+         *
+         *   Field:     date
+         *   Notes:     Field exists in the SMTP and SIP logs
+         *
          *   Field:     subject
-         *   Notes:     Field exists in the Known::CertInfo and SMTP logs
+         *   Notes:     Field exists in the KnownCerts, SMTP, SIP, and SSL logs
+         *
+         *   Field:     reply_to
+         *   Notes:     Field exists in the SMTP and SIP logs
+         *
+         *   Field:     user_agent
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
          */
         "helo": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "mailfrom": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "rcptto": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "date": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "from": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "to": {
+          "type": "text",
+          "fielddata": "true",
+          "analyzer": "simple"
+        },
+        "cc": {
           "type": "string",
           "analyzer": "simple"
         },
         "reply_to": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "msg_id": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "in_reply_to": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "x_originating_ip": {
           "type": "ip"
         },
         "first_received": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "second_received": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "last_reply": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         "path": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "tls": {
           "type": "boolean"
         },
         "fuids": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "is_webmail": {
           "type": "boolean"
@@ -627,51 +691,82 @@
          * Notable Fields
          *   Field:     version
          *   Notes:     Field exists in the HTTP, SSL, and SSH logs
+         *
+         *   Field:     subject
+         *   Notes:     Field exists in the KnownCerts, SMTP, SIP, and SSL logs
          */
         "cipher": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "curve": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "server_name": {
+          "type": "keyword"
+        },
+        "resumed": {
+          "type": "boolean"
+        },
+        "server_appdata": {
           "type": "string",
           "index": "not_analyzed"
         },
-        "resumed": {
+        "client_appdata": {
           "type": "boolean"
         },
         "last_alert": {
+          "type": "keyword"
+        },
+        "next_protocol": {
+          "type": "keyword"
+        },
+        "established": {
+          "type": "boolean"
+        },
+        "cert_chain_fuids": {
+          "type": "string"
+        },
+        "client_cert_chain_fuids": {
+          "type": "string"
+        },
+        "issuer": {
           "type": "string",
           "index": "not_analyzed"
         },
-        "next_protocol": {
+        "client_subject": {
           "type": "string",
           "index": "not_analyzed"
         },
-        "established": {
-          "type": "boolean"
+        "client_issuer": {
+          "type": "string",
+          "index": "not_analyzed"
+        },
+        "validation_status": {
+          "type": "string",
+          "index": "not_analyzed"
         },
         /*
          * Weird log support
          * 
https://www.bro.org/sphinx/scripts/base/frameworks/notice/weird.bro.html#type-Weird::Info
+         *
+         * Notable Fields
+         *   Field:     peer
+         *   Notes:     Field exists in the Weird, CaptureLoss, and Stats logs
+         *
+         *   Field:     name
+         *   Notes:     Field exists in the Weird and LoadedScripts logs
          */
         "name": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "addl": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "notice": {
           "type": "boolean"
         },
         "peer": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * Notice log support
@@ -679,30 +774,40 @@
          *
          * Notable Fields
          *   Field:     fuid
-         *   Notes:     Field exists in the FTP and Notice logs
+         *   Notes:     Field exists in the FTP, Files, and Notice logs
          *
          *   Field:     proto
          *   Notes:     Field exists in the DNS, Conn, DPD, and Notice logs
+         *
+         *   Field:     remote_location:country_code
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:region
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:city
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:latitude
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:longitude
+         *   Notes:     Field exists in the Notice and SSH logs
          */
         "file_mime_type": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "file_desc": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "note": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "msg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "sub": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "src": {
           "type": "ip"
@@ -711,52 +816,60 @@
           "type": "ip"
         },
         "p": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "n": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "src_peer": {
           "type": "ip"
         },
         "peer_descr": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "actions": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "suppress_for": {
-          "type": "double",
-          "index": "not_analyzed"
+          "type": "double"
         },
         "dropped": {
           "type": "boolean"
         },
+        "remote_location:country_code": {
+          "type": "string"
+        },
+        "remote_location:region": {
+          "type": "string"
+        },
+        "remote_location:city": {
+          "type": "string"
+        },
+        "remote_location:latitude": {
+          "type": "double"
+        },
+        "remote_location:longitude": {
+          "type": "double"
+        },
         /*
          * DHCP log support
          * 
https://www.bro.org/sphinx/scripts/base/protocols/dhcp/main.bro.html#type-DHCP::Info
          *
          * Notable Fields
+         *   Field:     mac
+         *   Notes:     Field exists in the DHCP, RADIUS, and KnownDevices logs
+         *
          *   Field:     trans_id
          *   Notes:     Field exists in the DNS and DHCP logs
-         *
-         *   Field:     mac
-         *   Notes:     Field exists in the DHCP, RADIUS, and 
Known::DevicesInfo logs
          */
         "mac": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "assigned_ip": {
           "type": "ip"
         },
         "lease_time": {
-          "type": "float",
-          "index": "not_analyzed"
+          "type": "float"
         },
         /*
          * SSH log support
@@ -765,49 +878,54 @@
          * Notable Fields
          *   Field:     version
          *   Notes:     Field exists in the HTTP, SSL, and SSH logs
+         *
+         *   Field:     remote_location:country_code
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:region
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:city
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:latitude
+         *   Notes:     Field exists in the Notice and SSH logs
+         *
+         *   Field:     remote_location:longitude
+         *   Notes:     Field exists in the Notice and SSH logs
          */
         "auth_success": {
           "type": "boolean"
         },
         "auth_attempts": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "direction": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "client": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "server": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "cipher_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "mac_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "compression_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "kex_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "host_key_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "host_key": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         /*
          * Software log support
@@ -815,38 +933,32 @@
          *
          * Notable Fields
          *   Field:     host
-         *   Notes:     Field exists in the HTTP and Software logs
+         *   Notes:     Field exists in the HTTP, KnownCerts, and Software logs
          */
         "host_p": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "software_type": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version:major": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version:minor": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version:minor2": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version:minor3": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "version:addl": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "unparsed_version": {
-          "type": "string",
+          "type": "text",
+          "fielddata": "true",
           "analyzer": "simple"
         },
         /*
@@ -858,16 +970,29 @@
          *   Notes:     Field exists in the HTTP and RADIUS logs
          *
          *   Field:     mac
-         *   Notes:     Field exists in the DHCP, RADIUS, and 
Known::DevicesInfo logs
+         *   Notes:     Field exists in the DHCP, RADIUS, and KnownDevices logs
+         *
+         *   Field:     ttl
+         *   Notes:     This field uses the "interval" type, which may need to be handled differently.
+         *              
https://www.bro.org/sphinx-git/script-reference/types.html#type-interval
          */
+        "framed_addr": {
+          "type": "ip"
+        },
         "remote_ip": {
           "type": "ip"
         },
         "connect_info": {
+          "type": "keyword"
+        },
+        "reply_msg": {
           "type": "string",
           "index": "not_analyzed"
         },
         "result": {
+          "type": "keyword"
+        },
+        "ttl": {
           "type": "string",
           "index": "not_analyzed"
         },
@@ -880,82 +1005,64 @@
          *   Notes:     In other bro records, the id field is of type conn_id, 
so it is
          *              expanded before being logged into 4 fields, all of 
which are addressed
          *              under the "Widely-used Bro fields" section of this 
template.  In X509
-         *              logs, however, id is a string to identify the 
certificate file id.
+         *              logs, however, id is a keyword to identify the 
certificate file id.
          */
         "id": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:version": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "certificate:serial": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:subject": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:issuer": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:not_valid_before": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:not_valid_after": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:key_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:sig_alg": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:key_type": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:key_length": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         "certificate:exponent": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "certificate:curve": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "san:dns": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "san:uri": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "san:email": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "san:ip": {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         },
         "basic_constraints:ca": {
           "type": "boolean"
         },
         "basic_constraints:path_len": {
-          "type": "integer",
-          "index": "not_analyzed"
+          "type": "integer"
         },
         /*
          * Known::DevicesInfo log support
@@ -963,11 +1070,245 @@
          *
          * Notable Fields
          *   Field:     mac
-         *   Notes:     Field exists in the DHCP, RADIUS, and 
Known::DevicesInfo logs
+         *   Notes:     Field exists in the DHCP, RADIUS, and KnownDevices logs
          */
         "dhcp_host_name": {
-          "type": "string",
+          "type": "keyword"
+        },
+        /*
+         * RFB::Info log support
+         * 
https://www.bro.org/sphinx-git/scripts/base/protocols/rfb/main.bro.html#type-RFB::Info
+         */
+        "client_major_version": {
+          "type": "keyword"
+        },
+        "client_minor_version": {
+          "type": "keyword"
+        },
+        "server_major_version": {
+          "type": "keyword"
+        },
+        "server_minor_version": {
+          "type": "keyword"
+        },
+        "authentication_method": {
+          "type": "keyword"
+        },
+        "auth": {
+          "type": "boolean"
+        },
+        "share_flag": {
+          "type": "boolean"
+        },
+        "desktop_name": {
+          "type": "keyword"
+        },
+        "width": {
+          "type": "integer"
+        },
+        "height": {
+          "type": "integer"
+        },
+        /*
+         * Stats::Info log support
+         * 
https://www.bro.org/sphinx/scripts/policy/misc/stats.bro.html#type-Stats::Info
+         *
+         * Notable Fields
+         *   Field:     peer
+         *   Notes:     Field exists in the Weird, CaptureLoss, and Stats logs
+         *
+         *   Field:     pkt_lag
+         *   Notes:     This field uses the "interval" type, which may need 
to be handled differently.
+         *              
https://www.bro.org/sphinx-git/script-reference/types.html#type-interval
+         */
+        "mem": {
+          "type": "integer"
+        },
+        "pkts_proc": {
+          "type": "integer"
+        },
+        "bytes_recv": {
+          "type": "integer"
+        },
+        "pkts_dropped": {
+          "type": "integer"
+        },
+        "pkts_link": {
+          "type": "integer"
+        },
+        "pkt_lag": {
+          "type": "keyword"
+        },
+        "events_proc": {
+          "type": "integer"
+        },
+        "events_queued": {
+          "type": "integer"
+        },
+        "active_tcp_conns": {
+          "type": "integer"
+        },
+        "active_udp_conns": {
+          "type": "integer"
+        },
+        "active_icmp_conns": {
+          "type": "integer"
+        },
+        "tcp_conns": {
+          "type": "integer"
+        },
+        "udp_conns": {
+          "type": "integer"
+        },
+        "icmp_conns": {
+          "type": "integer"
+        },
+        "timers": {
+          "type": "integer"
+        },
+        "active_timers": {
+          "type": "integer"
+        },
+        "files": {
+          "type": "integer"
+        },
+        "active_files": {
+          "type": "integer"
+        },
+        "dns_requests": {
+          "type": "integer"
+        },
+        "active_dns_requests": {
+          "type": "integer"
+        },
+        "reassem_tcp_size": {
+          "type": "integer"
+        },
+        "reassem_file_size": {
+          "type": "integer"
+        },
+        "reassem_frag_size": {
+          "type": "integer"
+        },
+        "reassem_unknown_size": {
+          "type": "integer"
+        },
+        /*
+         * CaptureLoss::Info log support
+         * 
https://www.bro.org/sphinx/scripts/policy/misc/capture-loss.bro.html#type-CaptureLoss::Info
+         *
+         * Notable Fields
+         *   Field:     ts_delta
+         *   Notes:     This field uses the "interval" type, which may need 
to be handled differently.
+         *              
https://www.bro.org/sphinx-git/script-reference/types.html#type-interval
+         *
+         *   Field:     peer
+         *   Notes:     Field exists in the Weird, CaptureLoss, and Stats logs
+         */
+        "ts_delta": {
+          "type": "keyword"
+        },
+        "gaps": {
+          "type": "integer"
-          "index": "not_analyzed"
+        },
+        "acks": {
+          "type": "integer"
+        },
+        "percent_lost": {
+          "type": "double"
+        },
+        /*
+         * Reporter::Info log support
+         * 
https://www.bro.org/sphinx/scripts/base/frameworks/reporter/main.bro.html#type-Reporter::Info
+         */
+        "level": {
+          "type": "text",
+          "fielddata": "true",
+          "analyzer": "simple"
+        },
+        "message": {
+          "type": "keyword"
+        },
+        "location": {
+          "type": "keyword"
+        },
+        /*
+         * SIP::Info log support
+         * 
https://www.bro.org/sphinx/scripts/base/protocols/sip/main.bro.html#type-SIP::Info
+         *
+         * Notable Fields
+         *   Field:     trans_depth
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
+         *
+         *   Field:     method
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     uri
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     date
+         *   Notes:     Field exists in the SMTP and SIP logs
+         *
+         *   Field:     reply_to
+         *   Notes:     Field exists in the SMTP and SIP logs
+         *
+         *   Field:     subject
+         *   Notes:     Field exists in the KnownCerts, SMTP, SIP, and SSL logs
+         *
+         *   Field:     user_agent
+         *   Notes:     Field exists in the HTTP, SMTP, and SIP logs
+         *
+         *   Field:     status_code
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     status_msg
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     request_body_len
+         *   Notes:     Field exists in the HTTP and SIP logs
+         *
+         *   Field:     response_body_len
+         *   Notes:     Field exists in the HTTP and SIP logs
+         */
+        "request_from": {
+          "type": "keyword"
+        },
+        "request_to": {
+          "type": "keyword"
+        },
+        "response_from": {
+          "type": "keyword"
+        },
+        "response_to": {
+          "type": "keyword"
+        },
+        "call_id": {
+          "type": "keyword"
+        },
+        "seq": {
+          "type": "keyword"
+        },
+        "request_path": {
+          "type": "text",
+          "fielddata": "true",
+          "analyzer": "simple"
+        },
+        "response_path": {
+          "type": "text",
+          "fielddata": "true",
+          "analyzer": "simple"
+        },
+        "warning": {
+          "type": "keyword"
+        },
+        "content_type": {
+          "type": "keyword"
+        },
+        "guid": {
+          "type": "keyword"
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-interface/metron-alerts/e2e/utils/e2e_util.ts
----------------------------------------------------------------------
diff --git a/metron-interface/metron-alerts/e2e/utils/e2e_util.ts 
b/metron-interface/metron-alerts/e2e/utils/e2e_util.ts
index 92476a4..8ae1de1 100644
--- a/metron-interface/metron-alerts/e2e/utils/e2e_util.ts
+++ b/metron-interface/metron-alerts/e2e/utils/e2e_util.ts
@@ -48,10 +48,23 @@ export function waitForStalenessOf (_element ) {
 export function loadTestData() {
   deleteTestData();
 
-  fs.createReadStream('e2e/mock-data/alerts_ui_e2e_index.template')
-    .pipe(request.post('http://node1:9200/_template/alerts_ui_e2e_index'));
-  fs.createReadStream('e2e/mock-data/alerts_ui_e2e_index.data')
-    
.pipe(request.post('http://node1:9200/alerts_ui_e2e_index/alerts_ui_e2e_doc/_bulk'));
+  let template = fs.readFileSync('e2e/mock-data/alerts_ui_e2e_index.template', 
'utf8');
+  request({
+    url: 'http://node1:9200/_template/alerts_ui_e2e_index',
+    method: 'POST',
+    body: template
+  }, function(error, response, body) {
+    // add logging if desired
+  });
+
+  let data = fs.readFileSync('e2e/mock-data/alerts_ui_e2e_index.data', 'utf8');
+  request({
+    url: 'http://node1:9200/alerts_ui_e2e_index/alerts_ui_e2e_doc/_bulk',
+    method: 'POST',
+    body: data
+  }, function(error, response, body) {
+    // add logging if desired
+  });
 }
 
 export function deleteTestData() {
@@ -60,8 +73,15 @@ export function deleteTestData() {
 
 export function createMetaAlertsIndex() {
   deleteMetaAlertsIndex();
-  
fs.createReadStream('./../../metron-deployment/packaging/ambari/metron-mpack/src/main/resources/common-services/METRON/CURRENT/package/files/metaalert_index.template')
-  .pipe(request.post('http://node1:9200/metaalert_index'));
+
+  let template = 
fs.readFileSync('./../../metron-deployment/packaging/ambari/metron-mpack/src/main/resources/common-services/METRON/CURRENT/package/files/metaalert_index.template',
 'utf8');
+  request({
+    url: 'http://node1:9200/_template/metaalert_index',
+    method: 'POST',
+    body: template
+  }, function(error, response, body) {
+    // add logging if desired
+  });
 }
 
 export function deleteMetaAlertsIndex() {

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/controller/RestExceptionHandler.java
----------------------------------------------------------------------
diff --git 
a/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/controller/RestExceptionHandler.java
 
b/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/controller/RestExceptionHandler.java
index 5e6f7e7..a3bab30 100644
--- 
a/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/controller/RestExceptionHandler.java
+++ 
b/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/controller/RestExceptionHandler.java
@@ -20,6 +20,8 @@ package org.apache.metron.rest.controller;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.metron.rest.RestException;
 import org.apache.metron.rest.model.RestError;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.web.bind.annotation.ControllerAdvice;
@@ -28,14 +30,17 @@ import org.springframework.web.bind.annotation.ResponseBody;
 import 
org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
 
 import javax.servlet.http.HttpServletRequest;
+import java.lang.invoke.MethodHandles;
 
 @ControllerAdvice(basePackages = "org.apache.metron.rest.controller")
 public class RestExceptionHandler extends ResponseEntityExceptionHandler {
+  private static final Logger LOG = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @ExceptionHandler(RestException.class)
   @ResponseBody
   ResponseEntity<?> handleControllerException(HttpServletRequest request, 
Throwable ex) {
     HttpStatus status = getStatus(request);
+    LOG.error("Encountered error: " + ex.getMessage(), ex);
     return new ResponseEntity<>(new RestError(status.value(), ex.getMessage(), 
ExceptionUtils.getRootCauseMessage(ex)), status);
   }
 

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/service/impl/UpdateServiceImpl.java
----------------------------------------------------------------------
diff --git 
a/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/service/impl/UpdateServiceImpl.java
 
b/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/service/impl/UpdateServiceImpl.java
index 847173e..76ac75d 100644
--- 
a/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/service/impl/UpdateServiceImpl.java
+++ 
b/metron-interface/metron-rest/src/main/java/org/apache/metron/rest/service/impl/UpdateServiceImpl.java
@@ -23,15 +23,18 @@ import org.apache.metron.indexing.dao.update.PatchRequest;
 import org.apache.metron.indexing.dao.update.ReplaceRequest;
 import org.apache.metron.rest.RestException;
 import org.apache.metron.rest.service.UpdateService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
+import java.lang.invoke.MethodHandles;
 import java.util.Optional;
 
 @Service
 public class UpdateServiceImpl implements UpdateService {
   private IndexDao dao;
-
+  private static final Logger LOG = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   @Autowired
   public UpdateServiceImpl(IndexDao dao) {
     this.dao = dao;
@@ -43,6 +46,7 @@ public class UpdateServiceImpl implements UpdateService {
     try {
       dao.patch(request, Optional.of(System.currentTimeMillis()));
     } catch (Exception e) {
+
       throw new RestException(e.getMessage(), e);
     }
   }

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SearchControllerIntegrationTest.java
----------------------------------------------------------------------
diff --git 
a/metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SearchControllerIntegrationTest.java
 
b/metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SearchControllerIntegrationTest.java
index f83fe00..d8758cd 100644
--- 
a/metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SearchControllerIntegrationTest.java
+++ 
b/metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/SearchControllerIntegrationTest.java
@@ -17,7 +17,20 @@
  */
 package org.apache.metron.rest.controller;
 
+import static org.apache.metron.integration.utils.TestUtils.assertEventually;
+import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
+import static org.hamcrest.Matchers.hasSize;
+import static 
org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
+import static 
org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
+import static 
org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
+import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
 import com.google.common.collect.ImmutableMap;
+import java.util.HashMap;
+import java.util.Map;
 import org.adrianwalker.multilinestring.Multiline;
 import org.apache.metron.indexing.dao.InMemoryDao;
 import org.apache.metron.indexing.dao.SearchIntegrationTest;
@@ -37,20 +50,6 @@ import org.springframework.test.web.servlet.MockMvc;
 import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 import org.springframework.web.context.WebApplicationContext;
 
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.metron.integration.utils.TestUtils.assertEventually;
-import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE;
-import static org.hamcrest.Matchers.hasSize;
-import static 
org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
-import static 
org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
-import static 
org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
-import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
 @RunWith(SpringRunner.class)
 @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
 @ActiveProfiles(TEST_PROFILE)
@@ -147,7 +146,7 @@ public class SearchControllerIntegrationTest extends 
DaoControllerTest {
         .andExpect(status().isOk())
         
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
         .andExpect(jsonPath("$.*", hasSize(5)))
-        .andExpect(jsonPath("$.common_string_field").value("string"))
+        .andExpect(jsonPath("$.common_string_field").value("text"))
         .andExpect(jsonPath("$.common_integer_field").value("integer"))
         .andExpect(jsonPath("$.bro_field").value("boolean"))
         .andExpect(jsonPath("$.snort_field").value("double"))
@@ -262,10 +261,11 @@ public class SearchControllerIntegrationTest extends 
DaoControllerTest {
             
.andExpect(jsonPath("$.groupResults[0].groupResults[0].score").value(50));
 
     this.mockMvc.perform(post(searchUrl + 
"/column/metadata").with(httpBasic(user, 
password)).with(csrf()).contentType(MediaType.parseMediaType("application/json;charset=UTF-8")).content("[\"bro\",\"snort\"]"))
+
         .andExpect(status().isOk())
         
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
         .andExpect(jsonPath("$.*", hasSize(5)))
-        .andExpect(jsonPath("$.common_string_field").value("string"))
+        .andExpect(jsonPath("$.common_string_field").value("text"))
         .andExpect(jsonPath("$.common_integer_field").value("integer"))
         .andExpect(jsonPath("$.bro_field").value("boolean"))
         .andExpect(jsonPath("$.snort_field").value("double"))
@@ -275,7 +275,7 @@ public class SearchControllerIntegrationTest extends 
DaoControllerTest {
           .andExpect(status().isOk())
           
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
           .andExpect(jsonPath("$.*", hasSize(4)))
-          .andExpect(jsonPath("$.common_string_field").value("string"))
+          .andExpect(jsonPath("$.common_string_field").value("text"))
           .andExpect(jsonPath("$.common_integer_field").value("integer"))
           .andExpect(jsonPath("$.bro_field").value("boolean"))
           .andExpect(jsonPath("$.duplicate_field").value("date"));
@@ -284,7 +284,7 @@ public class SearchControllerIntegrationTest extends 
DaoControllerTest {
           .andExpect(status().isOk())
           
.andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
           .andExpect(jsonPath("$.*", hasSize(4)))
-          .andExpect(jsonPath("$.common_string_field").value("string"))
+          .andExpect(jsonPath("$.common_string_field").value("text"))
           .andExpect(jsonPath("$.common_integer_field").value("integer"))
           .andExpect(jsonPath("$.snort_field").value("double"))
           .andExpect(jsonPath("$.duplicate_field").value("long"));
@@ -301,12 +301,12 @@ public class SearchControllerIntegrationTest extends 
DaoControllerTest {
   private void loadColumnTypes() throws ParseException {
     Map<String, Map<String, FieldType>> columnTypes = new HashMap<>();
     Map<String, FieldType> broTypes = new HashMap<>();
-    broTypes.put("common_string_field", FieldType.STRING);
+    broTypes.put("common_string_field", FieldType.TEXT);
     broTypes.put("common_integer_field", FieldType.INTEGER);
     broTypes.put("bro_field", FieldType.BOOLEAN);
     broTypes.put("duplicate_field", FieldType.DATE);
     Map<String, FieldType> snortTypes = new HashMap<>();
-    snortTypes.put("common_string_field", FieldType.STRING);
+    snortTypes.put("common_string_field", FieldType.TEXT);
     snortTypes.put("common_integer_field", FieldType.INTEGER);
     snortTypes.put("snort_field", FieldType.DOUBLE);
     snortTypes.put("duplicate_field", FieldType.LONG);

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/elasticsearch-shaded/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/elasticsearch-shaded/pom.xml 
b/metron-platform/elasticsearch-shaded/pom.xml
index 13acf67..789ddeb 100644
--- a/metron-platform/elasticsearch-shaded/pom.xml
+++ b/metron-platform/elasticsearch-shaded/pom.xml
@@ -30,6 +30,51 @@
             <version>18.0</version>
         </dependency>
         <dependency>
+            <groupId>org.elasticsearch.client</groupId>
+            <artifactId>transport</artifactId>
+            <version>${global_elasticsearch_version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>com.fasterxml.jackson.dataformat</groupId>
+                <artifactId>jackson-dataformat-smile</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>com.fasterxml.jackson.dataformat</groupId>
+                <artifactId>jackson-dataformat-yaml</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>com.fasterxml.jackson.dataformat</groupId>
+                <artifactId>jackson-dataformat-cbor</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>com.fasterxml.jackson.core</groupId>
+                <artifactId>jackson-core</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-api</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-log4j12</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>log4j</groupId>
+                <artifactId>log4j</artifactId>
+              </exclusion>
+            </exclusions>
+          </dependency>
+          <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <version>2.8.2</version>
+          </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <version>2.8.2</version>
+          </dependency>
+          <dependency>
             <groupId>org.elasticsearch</groupId>
             <artifactId>elasticsearch</artifactId>
             <version>${global_elasticsearch_version}</version>
@@ -50,6 +95,10 @@
                     <groupId>com.fasterxml.jackson.core</groupId>
                     <artifactId>jackson-core</artifactId>
                 </exclusion>
+                <exclusion> <!-- this is causing a weird build error if not 
excluded - Error creating shaded jar: null: IllegalArgumentException -->
+                    <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-api</artifactId>
+                </exclusion>
             </exclusions>
         </dependency>
         <dependency>
@@ -99,7 +148,16 @@
                               </excludes>
                             </filter>
                           </filters>  
-                            <relocations>
+                          <relocations>
+                                <!-- The REST API has conflicts with these 
packages -->
+                                <relocation>
+                                    <pattern>io.netty</pattern>
+                                    
<shadedPattern>org.apache.metron.io.netty</shadedPattern>
+                                </relocation>
+                                <relocation>
+                                    <pattern>org.apache.logging.log4j</pattern>
+                                    
<shadedPattern>org.apache.metron.logging.log4j</shadedPattern>
+                                </relocation>
                                 <relocation>
                                     <pattern>com.google.common</pattern>
                                     
<shadedPattern>org.apache.metron.guava.elasticsearch-shaded</shadedPattern>

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/elasticsearch-shaded/src/main/java/org/apache/metron/netty/utils/NettyRuntimeWrapper.java
----------------------------------------------------------------------
diff --git 
a/metron-platform/elasticsearch-shaded/src/main/java/org/apache/metron/netty/utils/NettyRuntimeWrapper.java
 
b/metron-platform/elasticsearch-shaded/src/main/java/org/apache/metron/netty/utils/NettyRuntimeWrapper.java
new file mode 100644
index 0000000..eda9e6a
--- /dev/null
+++ 
b/metron-platform/elasticsearch-shaded/src/main/java/org/apache/metron/netty/utils/NettyRuntimeWrapper.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.netty.utils;
+
+import io.netty.util.NettyRuntime;
+
+/**
+ * When working with shaded artifacts that have package relocation, you can't 
reference the deps
+ * explicitly in some IDEs, e.g. IntelliJ. This allows the shading and 
relocating to be isolated
+ * to the specific project that contains the dependency.
+ */
+public class NettyRuntimeWrapper {
+
+  public static int availableProcessors() {
+    return NettyRuntime.availableProcessors();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/elasticsearch-shaded/src/main/resources/META-INF/log4j-provider.properties
----------------------------------------------------------------------
diff --git 
a/metron-platform/elasticsearch-shaded/src/main/resources/META-INF/log4j-provider.properties
 
b/metron-platform/elasticsearch-shaded/src/main/resources/META-INF/log4j-provider.properties
new file mode 100644
index 0000000..c4bd3f0
--- /dev/null
+++ 
b/metron-platform/elasticsearch-shaded/src/main/resources/META-INF/log4j-provider.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LoggerContextFactory = 
org.apache.metron.logging.log4j.core.impl.Log4jContextFactory
+Log4jAPIVersion = 2.6.0
+FactoryPriority= 10
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-common/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/pom.xml 
b/metron-platform/metron-common/pom.xml
index f4e1e42..5bd61c9 100644
--- a/metron-platform/metron-common/pom.xml
+++ b/metron-platform/metron-common/pom.xml
@@ -90,6 +90,14 @@
             <scope>provided</scope>
             <exclusions>
                 <exclusion>
+                    <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-api</artifactId>
+                </exclusion>
+                <exclusion>
                     <artifactId>servlet-api</artifactId>
                     <groupId>javax.servlet</groupId>
                 </exclusion>

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/README.md 
b/metron-platform/metron-data-management/README.md
index b6acd83..f284dab 100644
--- a/metron-platform/metron-data-management/README.md
+++ b/metron-platform/metron-data-management/README.md
@@ -17,6 +17,17 @@ limitations under the License.
 -->
 # Resource Data Management
 
+## Table of Contents
+
+* [Overview](#overview)
+* [Simple HBase Enrichments/Threat 
Intelligence](#simple-hbase-enrichmentsthreat-intelligence)
+* [Extractor Framework](#extractor-framework)
+* [Enrichment Config](#enrichment-config)
+* [Loading Utilities](#loading-utilities)
+* [Pruning Data from Elasticsearch](#pruning-data-from-elasticsearch)
+
+## Overview
+
 This project is a collection of classes to assist with loading of
 various enrichment and threat intelligence sources into Metron.
 
@@ -354,3 +365,39 @@ The parameters for the utility are as follows:
 | -r         | --remote_dir        | No           | HDFS directory to land 
formatted GeoIP file - defaults to /apps/metron/geo/\<epoch millis\>/     |
 | -t         | --tmp_dir           | No           | Directory for landing the 
temporary GeoIP data - defaults to /tmp                                |
 | -z         | --zk_quorum         | Yes          | Zookeeper Quorum URL 
(zk1:port,zk2:port,...)                                                     |
+
+## Pruning Data from Elasticsearch
+
+**Note** - As of the Metron upgrade from Elasticsearch 2.3.3 to 5.6.2, the 
included Data Pruner is no longer supported. It is replaced in favor of the 
Curator utility
+provided by Elasticsearch. The current Curator version is 5.4 as of this 
version of Metron and does not match exactly with ES and Kibana.
+
+Elasticsearch provides tooling to prune index data through 
[Curator](https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/index.html).
+
+Here is a sample invocation that you can configure through Cron to prune 
indexes based on timestamp in the index name.
+
+```
+/opt/elasticsearch-curator/curator_cli --host localhost delete_indices 
--filter_list '
+    {
+      "filtertype": "age",
+      "source": "name",
+      "timestring": "%Y.%m.%d",
+      "unit": "days",
+      "unit_count": 10,
+      "direction": "older"
+    }'
+```
+
+From the ES documentation:
+> Using name as the source tells Curator to look for a timestring within the 
index or snapshot name, and convert that into an epoch timestamp (epoch implies 
UTC).
+
+You can also provide multiple filters as an array of JSON objects to 
filter_list if you want finer-grained control over the indexes that will be 
pruned.
+There is an implicit logical AND when chaining multiple filters.
+
+```
+--filter_list 
'[{"filtertype":"age","source":"creation_date","direction":"older","unit":"days","unit_count":13},{"filtertype":"pattern","kind":"prefix","value":"logstash"}]'
+```
+
+### Reference
+* 
[https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/index.html](https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/index.html)
+* 
[https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/filtertype_age.html](https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/filtertype_age.html)
+* 
[https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/singleton-cli.html](https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/singleton-cli.html)

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/pom.xml 
b/metron-platform/metron-data-management/pom.xml
index 2dca9a5..7cdfc80 100644
--- a/metron-platform/metron-data-management/pom.xml
+++ b/metron-platform/metron-data-management/pom.xml
@@ -29,6 +29,7 @@
         <httpcore.version>4.3.2</httpcore.version>
         <lucene.test.version>5.5.0</lucene.test.version>
     </properties>
+
     <dependencies>
         <dependency>
             <groupId>com.google.guava</groupId>
@@ -144,7 +145,6 @@
             <version>${global_hadoop_version}</version>
             <scope>provided</scope>
         </dependency>
-
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-auth</artifactId>
@@ -189,7 +189,6 @@
             </exclusions>
             <scope>provided</scope>
         </dependency>
-
         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-server</artifactId>
@@ -207,8 +206,6 @@
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-hdfs</artifactId>
                 </exclusion>
-
-
                 <exclusion>
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-common</artifactId>
@@ -232,11 +229,6 @@
             <version>${httpcore.version}</version>
         </dependency>
         <dependency>
-            <groupId>org.elasticsearch</groupId>
-            <artifactId>elasticsearch</artifactId>
-            <version>${global_elasticsearch_version}</version>
-        </dependency>
-        <dependency>
             <groupId>org.hamcrest</groupId>
             <artifactId>hamcrest-all</artifactId>
             <version>1.3</version>
@@ -248,25 +240,7 @@
             <version>2.1.14</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.elasticsearch</groupId>
-            <artifactId>elasticsearch</artifactId>
-            <version>${global_elasticsearch_version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.lucene</groupId>
-            <artifactId>lucene-test-framework</artifactId>
-            <version>${lucene.test.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.lucene</groupId>
-            <artifactId>lucene-core</artifactId>
-            <version>${lucene.test.version}</version>
-        </dependency>        
-        <dependency>
+      <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-hdfs</artifactId>
             <version>${global_hadoop_version}</version>
@@ -371,6 +345,7 @@
             <scope>test</scope>
         </dependency>
     </dependencies>
+
     <build>
         <plugins>
             <plugin>

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
----------------------------------------------------------------------
diff --git 
a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
 
b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
deleted file mode 100644
index ce54345..0000000
--- 
a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.bulk;
-
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.Iterator;
-import org.apache.commons.collections.IteratorUtils;
-import org.apache.metron.common.configuration.Configuration;
-import org.elasticsearch.client.AdminClient;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ElasticsearchDataPruner extends DataPruner {
-
-    private static final Logger LOG = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-    private static final String defaultDateFormat = "yyyy.MM.dd.HH";
-    private String indexPattern;
-    private SimpleDateFormat dateFormat;
-    protected Client indexClient = null;
-    protected Configuration configuration;
-
-    private Predicate<String> filterWithRegex = new Predicate<String>() {
-
-        @Override
-        public boolean apply(String str) {
-
-            try {
-                String dateString = str.substring(indexPattern.length());
-                Date indexCreateDate = dateFormat.parse(dateString);
-                long indexCreatedDate = indexCreateDate.getTime();
-                if (indexCreatedDate >= firstTimeMillis && indexCreatedDate < 
lastTimeMillis) {
-                    return true;
-                }
-            } catch (ParseException e) {
-                LOG.error("Unable to parse date from {}", 
str.substring(indexPattern.length()), e);
-            }
-
-            return false;
-        }
-
-    };
-
-    public ElasticsearchDataPruner(Date startDate, Integer 
numDays,Configuration configuration, Client indexClient, String indexPattern) 
throws Exception {
-
-        super(startDate, numDays, indexPattern);
-
-        this.indexPattern = indexPattern;
-        this.dateFormat = new SimpleDateFormat(defaultDateFormat);
-        this.configuration = configuration;
-        this.indexClient = indexClient;
-
-
-    }
-
-    @Override
-    public Long prune() throws IOException {
-
-        try {
-
-            configuration.update();
-
-        }
-        catch(Exception e) {
-            LOG.error("Unable to update configs",e);
-        }
-
-        String dateString = 
configuration.getGlobalConfig().get("es.date.format").toString();
-
-        if( null != dateString ){
-            dateFormat = new SimpleDateFormat(dateString);
-        }
-
-        ImmutableOpenMap<String, IndexMetaData> allIndices = 
indexClient.admin().cluster().prepareState().get().getState().getMetaData().getIndices();
-        Iterable indicesForDeletion = getFilteredIndices(allIndices);
-        Object[] indexArray = 
IteratorUtils.toArray(indicesForDeletion.iterator());
-
-        if(indexArray.length > 0) {
-            String[] indexStringArray = new String[indexArray.length];
-            System.arraycopy(indexArray, 0, indexStringArray, 0, 
indexArray.length);
-            deleteIndex(indexClient.admin(), indexStringArray);
-        }
-
-        return (long) indexArray.length;
-
-    }
-
-    public boolean deleteIndex(AdminClient adminClient, String... index) {
-
-        boolean isAcknowledged = 
adminClient.indices().delete(adminClient.indices().prepareDelete(index).request()).actionGet().isAcknowledged();
-        return isAcknowledged;
-
-    }
-
-    protected Iterable<String> getFilteredIndices(ImmutableOpenMap<String, 
IndexMetaData> indices) {
-
-        String[] returnedIndices = new String[indices.size()];
-        Iterator it = indices.keysIt();
-        System.arraycopy(IteratorUtils.toArray(it), 0, returnedIndices, 0, 
returnedIndices.length);
-        Iterable<String> matches = 
Iterables.filter(Arrays.asList(returnedIndices), filterWithRegex);
-
-        return matches;
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
----------------------------------------------------------------------
diff --git 
a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
 
b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
deleted file mode 100644
index 5d2f0f1..0000000
--- 
a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.bulk;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.net.InetAddress;
-import java.nio.file.Paths;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Map;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.PosixParser;
-import org.apache.curator.RetryPolicy;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.metron.common.configuration.Configuration;
-import org.apache.metron.common.utils.ErrorUtils;
-import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.InetSocketTransportAddress;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ElasticsearchDataPrunerRunner {
-
-    private static final Logger LOG = 
LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-    public static void main(String... argv) throws IOException, 
java.text.ParseException, ClassNotFoundException, InterruptedException {
-
-        /**
-         * Example
-         * start=$(date -d '30 days ago' +%m/%d/%Y)
-         * yarn jar Metron-DataLoads-{VERSION}.jar 
org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerRunner -i host1:9300 -p 
'/bro_index_' -s $(date -d '30 days ago' +%m/%d/%Y) -n 1;
-         * echo ${start}
-         **/
-
-        Options options = buildOptions();
-        Options help = new Options();
-        TransportClient client = null;
-
-        Option o = new Option("h", "help", false, "This screen");
-        o.setRequired(false);
-        help.addOption(o);
-
-
-
-        try {
-
-            CommandLine cmd = checkOptions(help,options, argv);
-
-            String start = cmd.getOptionValue("s");
-            Date startDate = new SimpleDateFormat("MM/dd/yyyy").parse(start);
-
-            Integer numDays = Integer.parseInt(cmd.getOptionValue("n"));
-            String indexPrefix = cmd.getOptionValue("p");
-
-            LOG.debug("Running prune with args: {} {}", startDate, numDays);
-
-            Configuration configuration = null;
-
-            if( cmd.hasOption("z")){
-
-                RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
-                CuratorFramework framework = 
CuratorFrameworkFactory.newClient(cmd.getOptionValue("z"),retryPolicy);
-                framework.start();
-                configuration = new Configuration(framework);
-
-            } else if ( cmd.hasOption("c") ){
-
-                String resourceFile = cmd.getOptionValue("c");
-                configuration = new Configuration(Paths.get(resourceFile));
-
-            } else {
-                ErrorUtils.RuntimeErrors.ILLEGAL_ARG.throwRuntime("Unable to 
finish setting up configuration - z or c option is required.");
-            }
-
-            configuration.update();
-
-            Map<String, Object> globalConfiguration = 
configuration.getGlobalConfig();
-
-            Settings.Builder settingsBuilder = Settings.settingsBuilder();
-            settingsBuilder.put("cluster.name", 
globalConfiguration.get("es.clustername"));
-            
settingsBuilder.put("curatorFramework.transport.ping_timeout","500s");
-            Settings settings = settingsBuilder.build();
-            client = TransportClient.builder().settings(settings).build()
-                    .addTransportAddress(
-                            new 
InetSocketTransportAddress(InetAddress.getByName(globalConfiguration.get("es.ip").toString()),
 Integer.parseInt(globalConfiguration.get("es.port").toString()) )
-                    );
-
-            DataPruner pruner = new ElasticsearchDataPruner(startDate, 
numDays, configuration, client, indexPrefix);
-
-            LOG.info("Pruned {} indices from {}:{}/{}", pruner.prune(), 
globalConfiguration.get("es.ip"), globalConfiguration.get("es.port"), 
indexPrefix);
-        } catch (Exception e) {
-
-            e.printStackTrace();
-            System.exit(-1);
-
-        } finally {
-
-            if( null != client) {
-                client.close();
-            }
-
-        }
-
-    }
-
-    public static CommandLine checkOptions(Options help, Options options, 
String ... argv) throws ParseException {
-
-        CommandLine cmd = null;
-        CommandLineParser parser = new PosixParser();
-
-
-        try {
-
-            cmd = parser.parse(help,argv,true);
-
-            if( cmd.getOptions().length > 0){
-                final HelpFormatter usageFormatter = new HelpFormatter();
-                usageFormatter.printHelp("ElasticsearchDataPrunerRunner", 
null, options, null, true);
-                System.exit(0);
-            }
-
-            cmd = parser.parse(options, argv);
-
-        } catch (ParseException e) {
-
-            final HelpFormatter usageFormatter = new HelpFormatter();
-            usageFormatter.printHelp("ElasticsearchDataPrunerRunner", null, 
options, null, true);
-            throw e;
-
-        }
-
-
-        if( (cmd.hasOption("z") && cmd.hasOption("c")) || (!cmd.hasOption("z") 
&& !cmd.hasOption("c")) ){
-
-            System.err.println("One (only) of zookeeper-hosts or 
config-location is required");
-            final HelpFormatter usageFormatter = new HelpFormatter();
-            usageFormatter.printHelp("ElasticsearchDataPrunerRunner", null, 
options, null, true);
-            throw new RuntimeException("Must specify zookeeper-hosts or 
config-location, but not both");
-
-        }
-
-        return cmd;
-    }
-
-    public static Options buildOptions(){
-
-        Options options = new Options();
-
-        Option o = new Option("s", "start-date", true, "Starting Date 
(MM/DD/YYYY)");
-        o.setArgName("START_DATE");
-        o.setRequired(true);
-        options.addOption(o);
-
-        o = new Option("n", "numdays", true, "Number of days back to purge");
-        o.setArgName("NUMDAYS");
-        o.setRequired(true);
-        options.addOption(o);
-
-        o = new Option("p", "index-prefix", true, "Index prefix  - e.g. 
bro_index_");
-        o.setArgName("PREFIX");
-        o.setRequired(true);
-        options.addOption(o);
-
-        o = new Option("c", "config-location", true, "Directory Path - e.g. 
/path/to/config/dir");
-        o.setArgName("CONFIG");
-        o.setRequired(false);
-        options.addOption(o);
-
-        o = new Option("z", "zookeeper-hosts", true, "Zookeeper URL - e.g. 
zkhost1:2181,zkhost2:2181,zkhost3:2181");
-        o.setArgName("PREFIX");
-        o.setRequired(false);
-        options.addOption(o);
-
-        return options;
-    }
-}

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/src/main/scripts/prune_elasticsearch_indices.sh
----------------------------------------------------------------------
diff --git 
a/metron-platform/metron-data-management/src/main/scripts/prune_elasticsearch_indices.sh
 
b/metron-platform/metron-data-management/src/main/scripts/prune_elasticsearch_indices.sh
index c3f1d05..f891fa3 100644
--- 
a/metron-platform/metron-data-management/src/main/scripts/prune_elasticsearch_indices.sh
+++ 
b/metron-platform/metron-data-management/src/main/scripts/prune_elasticsearch_indices.sh
@@ -17,5 +17,6 @@
 # limitations under the License.
 #
 
-yarn jar 
/usr/metron/${project.version}/lib/${project.artifactId}-${project.version}.jar 
org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerRunner "$@"
+echo "The Metron Elasticsearch data pruner has been deprecated in favor of the 
Curator framework."
+echo "See 
https://www.elastic.co/guide/en/elasticsearch/client/curator/5.4/index.html";
 

http://git-wip-us.apache.org/repos/asf/metron/blob/e8213918/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
----------------------------------------------------------------------
diff --git 
a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
 
b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
deleted file mode 100644
index 5f32bee..0000000
--- 
a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.bulk;
-
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.FileDescriptor;
-import java.io.FileOutputStream;
-import java.io.PrintStream;
-
-public class ElasticsearchDataPrunerRunnerTest {
-
-    private Options options;
-    private Options help;
-
-    private ByteArrayOutputStream outContent;
-    private ByteArrayOutputStream errContent;
-
-    @Before
-    public void setUp(){
-
-        options = ElasticsearchDataPrunerRunner.buildOptions();
-        help = new Options();
-
-        Option o = new Option("h", "help", false, "This screen");
-        o.setRequired(false);
-        help.addOption(o);
-
-        outContent = new ByteArrayOutputStream();
-        errContent = new ByteArrayOutputStream();
-
-        System.setOut(new PrintStream(outContent));
-        System.setErr(new PrintStream(errContent));
-
-    }
-
-    @Test(expected = RuntimeException.class)
-    public void testThrowsWithoutZookeeperOrConfigLocation() throws Exception {
-
-        String[] args = new 
String[]{"-n","30","-p","sensor_index","-s","03/30/2016"};
-        ElasticsearchDataPrunerRunner.checkOptions(help,options,args);
-
-    }
-
-    @Test(expected = RuntimeException.class)
-    public void testThrowsWithZookeeperAndConfiguration() throws Exception {
-
-        String[] args = new 
String[]{"-n","30","-p","sensor_index","-s","03/30/2016"};
-        ElasticsearchDataPrunerRunner.checkOptions(help,options,args);
-
-    }
-
-}

Reply via email to