http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
new file mode 100644
index 0000000..984e834
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.doc;
+
+public class DocConstants {
+
+  public class CommonDescriptions {
+    public static final String X_AXIS_D = "The column which can be value for x-axis in graph formation";
+    public static final String Y_AXIS_D = "The column which can be value for y-axis in graph formation";
+    public static final String STACK_BY_D = "The graph property for stacking the plot";
+    public static final String EXCLUDE_QUERY_D = "Exclude the values in query result e.g.: [{message:*timeout*}]";
+    public static final String INCLUDE_QUERY_D = "Include the values in query result e.g.: [{message:*exception*}]";
+    public static final String MUST_BE_D = "Include the components, comma separated values";
+    public static final String MUST_NOT_D = "Exclude the components, comma separated values";
+    public static final String FROM_D = "Date range param, start date";
+    public static final String TO_D = "Date range param, end date";
+    public static final String START_TIME_D = "Date range param which is supported from browser url";
+    public static final String END_TIME_D = "Date range param which is supported from browser url";
+    public static final String START_INDEX_D = "Start index of the queried result";
+    public static final String SORT_TYPE_D = "Type of sorting (asc, desc)";
+    public static final String SORT_BY_D = "Sorting the results based on this field";
+    public static final String PAGE_D = "Number of pages for the results";
+    public static final String PAGE_SIZE_D = "Page size of the results";
+    public static final String UNIT_D = "Aggregate the data with time gap as unit, e.g. 1MINUTE";
+    public static final String QUERY_D = "not required";
+    public static final String I_MESSAGE_D = "Include query which will query against message column";
+    public static final String E_MESSAGE_D = "Exclude query which will query against message column";
+    public static final String IS_LAST_PAGE_D = "Show last page (true/false)";
+    public static final String FIELD_D = "Get values for particular field";
+    public static final String FORMAT_D = "File Export format, can be 'txt' or 'json'";
+    public static final String TOP = "Number that defines how many top elements you would like to see.";
+  }
+
+  public class AuditOperationDescriptions {
+    public static final String GET_AUDIT_SCHEMA_FIELD_LIST_OD = "Get list of schema fields in audit collection";
+    public static final String GET_AUDIT_LOGS_OD = "Get the list of logs details";
+    public static final String PURGE_AUDIT_LOGS_OD = "Purge audit logs based on criteria";
+    public static final String GET_AUDIT_COMPONENTS_OD = "Get the list of audit components currently active or having data in Solr";
+    public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "Get the data required for line graph";
+    public static final String GET_TOP_AUDIT_RESOURCES_OD = "Get the top audit resource count (grouped by type)";
+    public static final String EXPORT_USER_TALBE_TO_TEXT_FILE_OD = "Export the tables shown on Audit tab";
+    public static final String GET_SERVICE_LOAD_OD = "The graph for showing the top users accessing the services";
+  }
+
+  public class ServiceDescriptions {
+    public static final String LEVEL_D = "filter for log level";
+    public static final String BUNDLE_ID = "filter for host";
+    public static final String CLUSTER_D = "filter for clusters (comma separated list)";
+    public static final String FILE_NAME_D = "File name filter which is supported from browser url";
+    public static final String HOST_NAME_D = "Host name filter which is supported from browser url";
+    public static final String COMPONENT_NAME_D = "Component name filter which is supported from browser url";
+    public static final String FIND_D = "Finding particular text on subsequent pages in case of table view with pagination";
+    public static final String ID_D = "Log id value for traversing to that particular record with that log id";
+    public static final String KEYWORD_TYPE_D = "Searching the find param value in previous or next in paginated table";
+    public static final String TOKEN_D = "unique number used along with FIND_D. The request can be canceled using this token";
+    public static final String SOURCE_LOG_ID_D = "fetch the record set having that log Id";
+    public static final String NUMBER_ROWS_D = "Getting rows after particular log entry - used in 'Preview' option";
+    public static final String SCROLL_TYPE_D = "Used in 'Preview' feature for getting records 'after' or 'before'";
+    public static final String UTC_OFFSET_D = "timezone offset";
+    public static final String HOST_PARAMS_D = "filter for hosts";
+  }
+
+  public class ServiceOperationDescriptions {
+    public static final String SEARCH_LOGS_OD = "Searching logs entry";
+    public static final String PURGE_LOGS_OD = "Purge service logs based on criteria";
+    public static final String GET_HOSTS_OD = "Get the list of service hosts currently active or having data in Solr";
+    public static final String GET_COMPONENTS_OD = "Get the list of service components currently active or having data in Solr";
+    public static final String GET_AGGREGATED_INFO_OD = "not required";
+    public static final String GET_LOG_LEVELS_COUNT_OD = "Get Log levels with their counts";
+    public static final String GET_COMPONENTS_COUNT_OD = "Get components with their counts";
+    public static final String GET_HOSTS_COUNT_OD = "Get hosts with their counts";
+    public static final String GET_TREE_EXTENSION_OD = "Get host and components hierarchy with log counts";
+    public static final String GET_HISTOGRAM_DATA_OD = "Get data for histogram";
+    public static final String EXPORT_TO_TEXT_FILE_OD = "Export the table data in file";
+    public static final String GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD = "Get components with log level distribution count";
+    public static final String GET_ANY_GRAPH_COUNT_DATA_OD = "Get the data generic enough to use for graph plots (yAxis is always count)";
+    public static final String GET_HOST_LIST_BY_COMPONENT_OD = "Get host list of components";
+    public static final String GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD = "Get service logs schema fields";
+    public static final String GET_HADOOP_SERVICE_CONFIG_JSON_OD = "Get the json having meta data of services supported by logsearch";
+    public static final String GET_AFTER_BEFORE_LOGS_OD = "Preview feature data";
+    public static final String REQUEST_CANCEL = "Cancel an ongoing solr request";
+    public static final String GET_HOST_LOGFILES_OD = "Get the log files of the components of a host";
+  }
+
+  public class PublicOperationDescriptions {
+    public static final String OBTAIN_GENERAL_CONFIG_OD = "Obtain general config";
+  }
+
+  public class UserConfigDescriptions {
+    public static final String FILTER_NAME_D = "The saved query as filter in Solr, search is supported by this param";
+    public static final String ROW_TYPE_D = "Row type is solr to identify as filter query";
+  }
+
+  public class UserConfigOperationDescriptions {
+    public static final String SAVE_USER_CONFIG_OD = "Save user config";
+    public static final String DELETE_USER_CONFIG_OD = "Delete user config";
+    public static final String GET_USER_CONFIG_OD = "Get user config";
+    public static final String GET_USER_FILTER_OD = "Get user filter";
+    public static final String UPDATE_USER_FILTER_OD = "Update user filter";
+    public static final String GET_ALL_USER_NAMES_OD = "Get all user names";
+  }
+
+  public class StatusOperationDescriptions {
+    public static final String STATUS_OD = "Get statuses for collections (not health state - show true if something already done)";
+    public static final String SERVICE_LOGS_STATUS_OD = "Get statuses for service log collection (not health state - show true if something already done)";
+    public static final String AUDIT_LOGS_STATUS_OD = "Get statuses for audit log collection (not health state - show true if something already done)";
+    public static final String USER_CONFIG_STATUS_OD = "Get statuses for userconfig collection (not health state - show true if something already done)";
+  }
+}
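
For context, description constants like these are typically consumed through Swagger annotations on the REST resources. A minimal, hypothetical sketch (the resource class, method, and use of io.swagger annotations below are assumptions for illustration, not part of this patch):

package org.apache.ambari.logsearch.rest;

import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;

import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FROM_D;
import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.TO_D;

// Hypothetical resource: the constants supply the human-readable parameter descriptions in the generated API docs.
public class ExampleAuditLogsResource {

  @ApiOperation("Get the list of logs details")
  public String getAuditLogs(@ApiParam(value = FROM_D) String from,
                             @ApiParam(value = TO_D) String to) {
    return "...";
  }
}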

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java
new file mode 100644
index 0000000..fde176f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ACLHandler.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.SolrZooKeeper;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public class ACLHandler implements SolrZkRequestHandler<Boolean> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ACLHandler.class);
+
+  @Override
+  public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
+    List<ACL> aclsToSetList = solrPropsConfig.getZkAcls();
+    if (CollectionUtils.isNotEmpty(aclsToSetList)) {
+      LOG.info("Setting acls for '{}' collection...", solrPropsConfig.getCollection());
+      SolrZkClient zkClient = solrClient.getZkStateReader().getZkClient();
+      SolrZooKeeper solrZooKeeper = zkClient.getSolrZooKeeper();
+      String collectionPath = String.format("/collections/%s", solrPropsConfig.getCollection());
+      String configsPath = String.format("/configs/%s", solrPropsConfig.getConfigName());
+      List<ACL> collectionAcls = solrZooKeeper.getACL(collectionPath, new Stat());
+      if (isRefreshAclsNeeded(aclsToSetList, collectionAcls)) {
+        LOG.info("ACLs differ for {}, updating acls.", collectionPath);
+        setRecursivelyOn(solrZooKeeper, collectionPath, aclsToSetList);
+      }
+      List<ACL> configsAcls = solrZooKeeper.getACL(configsPath, new Stat());
+      if (isRefreshAclsNeeded(aclsToSetList, configsAcls)) {
+        LOG.info("ACLs differ for {}, updating acls.", configsPath);
+        setRecursivelyOn(solrZooKeeper, configsPath, aclsToSetList);
+      }
+    }
+    return true;
+  }
+
+  private boolean isRefreshAclsNeeded(List<ACL> acls, List<ACL> newAcls) {
+    boolean result = false;
+    if (acls != null) {
+      if (acls.size() != newAcls.size()) {
+        return true;
+      }
+      result = aclDiffers(acls, newAcls);
+      if (!result) {
+        result = aclDiffers(newAcls, acls);
+      }
+    }
+    return result;
+  }
+
+  private boolean aclDiffers(List<ACL> aclList1, List<ACL> aclList2) {
+    for (ACL acl : aclList1) {
+      for (ACL newAcl : aclList2) {
+        if (acl.getId() != null && acl.getId().getId().equals(newAcl.getId().getId())
+          && acl.getPerms() != newAcl.getPerms()) {
+          LOG.info("ACL for '{}' differs: '{}' on znode, should be '{}'",
+            acl.getId().getId(), acl.getPerms(), newAcl.getPerms());
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  private void setRecursivelyOn(SolrZooKeeper solrZooKeeper, String node, List<ACL> acls)
+    throws KeeperException, InterruptedException {
+    solrZooKeeper.setACL(node, acls, -1);
+    for (String child : solrZooKeeper.getChildren(node, null)) {
+      String path = node.endsWith("/") ? node + child : node + "/" + child;
+      setRecursivelyOn(solrZooKeeper, path, acls);
+    }
+  }
+}
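
A minimal sketch of how a handler like this might be invoked; the driver class below is hypothetical and the client/config wiring is assumed for illustration:

import org.apache.ambari.logsearch.conf.SolrPropsConfig;
import org.apache.ambari.logsearch.handler.ACLHandler;
import org.apache.solr.client.solrj.impl.CloudSolrClient;

public class AclHandlerUsageSketch {
  // Hypothetical driver: applies the configured ZooKeeper ACLs to the collection and config znodes.
  public static void applyAcls(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
    Boolean done = new ACLHandler().handle(solrClient, solrPropsConfig);
    System.out.println("ACL refresh finished: " + done);
  }
}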

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java
new file mode 100644
index 0000000..752a1e1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/CreateCollectionHandler.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.commons.lang.StringUtils;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpClientUtil;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+
+import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.ROUTER_FIELD;
+
+public class CreateCollectionHandler implements SolrZkRequestHandler<Boolean> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(CreateCollectionHandler.class);
+
+  private static final String MODIFY_COLLECTION_QUERY = "/admin/collections?action=MODIFYCOLLECTION&collection=%s&%s=%d";
+  private static final String MAX_SHARDS_PER_NODE = "maxShardsPerNode";
+
+  private List<String> allCollectionList;
+
+  public CreateCollectionHandler(List<String> allCollectionList) {
+    this.allCollectionList = allCollectionList;
+  }
+
+  @Override
+  public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
+    boolean result;
+    if (solrPropsConfig.getSplitInterval().equalsIgnoreCase("none")) {
+      result = createCollection(solrClient, solrPropsConfig, this.allCollectionList);
+    } else {
+      result = setupCollectionsWithImplicitRouting(solrClient, solrPropsConfig, this.allCollectionList);
+    }
+    }
+    return result;
+  }
+
+  private boolean setupCollectionsWithImplicitRouting(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig, List<String> allCollectionList)
+    throws Exception {
+    LOG.info("setupCollectionsWithImplicitRouting(). collectionName=" + solrPropsConfig.getCollection()
+      + ", numberOfShards=" + solrPropsConfig.getNumberOfShards());
+
+    // Default is true, because if the collection and shard is already there, then it will return true
+    boolean returnValue = true;
+
+    List<String> shardsList = new ArrayList<String>();
+    for (int i = 0; i < solrPropsConfig.getNumberOfShards(); i++) {
+      shardsList.add("shard" + i);
+    }
+    String shardsListStr = StringUtils.join(shardsList, ',');
+
+    // Check if collection is already in zookeeper
+    if (!allCollectionList.contains(solrPropsConfig.getCollection())) {
+      LOG.info("Creating collection " + solrPropsConfig.getCollection() + ", shardsList=" + shardsList);
+      CollectionAdminRequest.Create collectionCreateRequest = new CollectionAdminRequest.Create();
+      collectionCreateRequest.setCollectionName(solrPropsConfig.getCollection());
+      collectionCreateRequest.setRouterName("implicit");
+      collectionCreateRequest.setShards(shardsListStr);
+      collectionCreateRequest.setNumShards(solrPropsConfig.getNumberOfShards());
+      collectionCreateRequest.setReplicationFactor(solrPropsConfig.getReplicationFactor());
+      collectionCreateRequest.setConfigName(solrPropsConfig.getConfigName());
+      collectionCreateRequest.setRouterField(ROUTER_FIELD);
+      collectionCreateRequest.setMaxShardsPerNode(solrPropsConfig.getReplicationFactor() * solrPropsConfig.getNumberOfShards());
+
+      CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient);
+      if (createResponse.getStatus() != 0) {
+        returnValue = false;
+        LOG.error("Error creating collection. collectionName=" + solrPropsConfig.getCollection()
+          + ", shardsList=" + shardsList + ", response=" + createResponse);
+      } else {
+        LOG.info("Created collection " + solrPropsConfig.getCollection() + ", shardsList=" + shardsList);
+      }
+    } else {
+      LOG.info("Collection " + solrPropsConfig.getCollection() + " is already there. Will check whether it has the required shards");
+      Collection<Slice> slices = getSlices(solrClient, solrPropsConfig);
+      Collection<String> existingShards = getShards(slices, solrPropsConfig);
+      if (existingShards.size() < shardsList.size()) {
+        try {
+          updateMaximumNumberOfShardsPerCore(slices, solrPropsConfig);
+        } catch (Throwable t) {
+          returnValue = false;
+          LOG.error(String.format("Exception during updating collection (%s)", t));
+        }
+      }
+      for (String shard : shardsList) {
+        if (!existingShards.contains(shard)) {
+          try {
+            LOG.info("Going to add Shard " + shard + " to collection " + solrPropsConfig.getCollection());
+            CollectionAdminRequest.CreateShard createShardRequest = new CollectionAdminRequest.CreateShard();
+            createShardRequest.setCollectionName(solrPropsConfig.getCollection());
+            createShardRequest.setShardName(shard);
+            CollectionAdminResponse response = createShardRequest.process(solrClient);
+            if (response.getStatus() != 0) {
+              LOG.error("Error creating shard " + shard + " in collection " + solrPropsConfig.getCollection() + ", response=" + response);
+              returnValue = false;
+              break;
+            } else {
+              LOG.info("Successfully created shard " + shard + " in collection " + solrPropsConfig.getCollection());
+            }
+          } catch (Throwable t) {
+            LOG.error("Error creating shard " + shard + " in collection " + solrPropsConfig.getCollection(), t);
+            returnValue = false;
+            break;
+          }
+        }
+      }
+    }
+    return returnValue;
+  }
+
+  private boolean createCollection(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig, List<String> allCollectionList) throws SolrServerException, IOException {
+
+    if (allCollectionList.contains(solrPropsConfig.getCollection())) {
+      LOG.info("Collection " + solrPropsConfig.getCollection() + " is already there. Won't create it");
+      return true;
+    }
+
+    LOG.info("Creating collection " + solrPropsConfig.getCollection() + ", numberOfShards=" + solrPropsConfig.getNumberOfShards() +
+      ", replicationFactor=" + solrPropsConfig.getReplicationFactor());
+
+    CollectionAdminRequest.Create collectionCreateRequest = new CollectionAdminRequest.Create();
+    collectionCreateRequest.setCollectionName(solrPropsConfig.getCollection());
+    collectionCreateRequest.setNumShards(solrPropsConfig.getNumberOfShards());
+    collectionCreateRequest.setReplicationFactor(solrPropsConfig.getReplicationFactor());
+    collectionCreateRequest.setConfigName(solrPropsConfig.getConfigName());
+    collectionCreateRequest.setMaxShardsPerNode(calculateMaxShardsPerNode(solrPropsConfig));
+    CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient);
+    if (createResponse.getStatus() != 0) {
+      LOG.error("Error creating collection. collectionName=" + solrPropsConfig.getCollection() + ", response=" + createResponse);
+      return false;
+    } else {
+      LOG.info("Created collection " + solrPropsConfig.getCollection() + ", numberOfShards=" + solrPropsConfig.getNumberOfShards() +
+        ", replicationFactor=" + solrPropsConfig.getReplicationFactor());
+      return true;
+    }
+  }
+
+  private void updateMaximumNumberOfShardsPerCore(Collection<Slice> slices, SolrPropsConfig solrPropsConfig) throws IOException {
+    String baseUrl = getRandomBaseUrl(slices);
+    if (baseUrl != null) {
+      CloseableHttpClient httpClient = HttpClientUtil.createClient(null);
+      HttpGet request = new HttpGet(baseUrl + String.format(MODIFY_COLLECTION_QUERY,
+        solrPropsConfig.getCollection(), MAX_SHARDS_PER_NODE, calculateMaxShardsPerNode(solrPropsConfig)));
+      HttpResponse response = httpClient.execute(request);
+      if (response.getStatusLine().getStatusCode() != Response.Status.OK.getStatusCode()) {
+        throw new IllegalStateException(String.format("Cannot update collection (%s) - increase max number of nodes per core", solrPropsConfig.getCollection()));
+      }
+    } else {
+      throw new IllegalStateException(String.format("Cannot get any core url for updating collection (%s)", solrPropsConfig.getCollection()));
+    }
+  }
+
+  private Collection<Slice> getSlices(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) {
+    ZkStateReader reader = solrClient.getZkStateReader();
+    return reader.getClusterState().getSlices(solrPropsConfig.getCollection());
+  }
+
+  private Collection<String> getShards(Collection<Slice> slices, SolrPropsConfig solrPropsConfig) {
+    Collection<String> list = new HashSet<>();
+    for (Slice slice : slices) {
+      for (Replica replica : slice.getReplicas()) {
+        LOG.info("colName=" + solrPropsConfig.getCollection() + ", slice.name=" + slice.getName() + ", slice.state=" + slice.getState() +
+          ", replica.core=" + replica.getStr("core") + ", replica.state=" + replica.getStr("state"));
+        list.add(slice.getName());
+      }
+    }
+    return list;
+  }
+
+  private String getRandomBaseUrl(Collection<Slice> slices) {
+    String coreUrl = null;
+    if (slices != null) {
+      for (Slice slice : slices) {
+        if (!slice.getReplicas().isEmpty()) {
+          Replica replica = slice.getReplicas().iterator().next();
+          coreUrl = replica.getStr("base_url");
+          if (coreUrl != null) {
+            break;
+          }
+        }
+      }
+    }
+    return coreUrl;
+  }
+
+  private Integer calculateMaxShardsPerNode(SolrPropsConfig solrPropsConfig) {
+    return solrPropsConfig.getReplicationFactor() * solrPropsConfig.getNumberOfShards();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java
new file mode 100644
index 0000000..124ce40
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ListCollectionHandler.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.common.SolrException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ListCollectionHandler implements SolrZkRequestHandler<List<String>> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ListCollectionHandler.class);
+
+  @Override
+  public List<String> handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
+    try {
+      CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
+      CollectionAdminResponse response = colListReq.process(solrClient);
+      if (response.getStatus() != 0) {
+        LOG.error("Error getting collection list from solr.  response=" + response);
+        return null;
+      }
+      return (List<String>) response.getResponse().get("collections");
+    } catch (SolrException e) {
+      LOG.error("getCollections() operation failed", e);
+      return new ArrayList<>();
+    }
+  }
+}
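
For orientation, these handlers appear designed to be composed: the collection list returned by ListCollectionHandler feeds the CreateCollectionHandler constructor. A minimal, hypothetical driver under that assumption (client construction and error handling for a null list are omitted):

import java.util.List;

import org.apache.ambari.logsearch.conf.SolrPropsConfig;
import org.apache.ambari.logsearch.handler.CreateCollectionHandler;
import org.apache.ambari.logsearch.handler.ListCollectionHandler;
import org.apache.solr.client.solrj.impl.CloudSolrClient;

public class CollectionSetupSketch {
  // Hypothetical: list the existing collections, then create the configured one if it is missing.
  public static boolean ensureCollection(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
    List<String> existingCollections = new ListCollectionHandler().handle(solrClient, solrPropsConfig);
    return new CreateCollectionHandler(existingCollections).handle(solrClient, solrPropsConfig);
  }
}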

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java
new file mode 100644
index 0000000..52f3366
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/ReloadCollectionHandler.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ReloadCollectionHandler implements SolrZkRequestHandler<Boolean> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ReloadCollectionHandler.class);
+
+  @Override
+  public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
+    boolean result = false;
+    try {
+      LOG.info("Reload collection - '{}'", solrPropsConfig.getCollection());
+      CollectionAdminRequest.Reload reloadCollectionRequest = new CollectionAdminRequest.Reload();
+      reloadCollectionRequest.setCollectionName(solrPropsConfig.getCollection());
+      reloadCollectionRequest.process(solrClient);
+      result = true;
+    } catch (Exception e) {
+      LOG.error(String.format("Reload collection ('%s') failed.", solrPropsConfig.getCollection()), e);
+    }
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java
new file mode 100644
index 0000000..85ae6cb
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/SolrZkRequestHandler.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+
+interface SolrZkRequestHandler<T> {
+  T handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
new file mode 100644
index 0000000..27a6705
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/handler/UploadConfigurationHandler.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.handler;
+
+import org.apache.ambari.logsearch.conf.SolrPropsConfig;
+import org.apache.commons.configuration.XMLConfiguration;
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkConfigManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.FileSystems;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+public class UploadConfigurationHandler implements SolrZkRequestHandler<Boolean> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UploadConfigurationHandler.class);
+
+  private static final String SCHEMA_FILE = "managed-schema";
+  private static final String SOLR_CONFIG_FILE = "solrconfig.xml";
+  private static final String FIELD_NAME_PATH = "field[@name]";
+  private static final String FIELD_TYPE_NAME_PATH = "fieldType[@name]";
+  private static final String DYNAMIC_FIELD_NAME_PATH = "dynamicField[@name]";
+
+  private File configSetFolder;
+
+  public UploadConfigurationHandler(File configSetFolder) {
+    this.configSetFolder = configSetFolder;
+  }
+
+  @Override
+  public Boolean handle(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) throws Exception {
+    boolean reloadCollectionNeeded = false;
+    String separator = FileSystems.getDefault().getSeparator();
+    String downloadFolderLocation = String.format("%s%s%s%s%s", System.getProperty("java.io.tmpdir"), separator,
+      UUID.randomUUID().toString(), separator, solrPropsConfig.getConfigName());
+    solrClient.connect();
+    SolrZkClient zkClient = solrClient.getZkStateReader().getZkClient();
+    File tmpDir = new File(downloadFolderLocation);
+    try {
+      ZkConfigManager zkConfigManager = new ZkConfigManager(zkClient);
+      boolean configExists = zkConfigManager.configExists(solrPropsConfig.getConfigName());
+      if (configExists) {
+        LOG.info("Config set exists for '{}' collection. Refreshing it if needed...", solrPropsConfig.getCollection());
+        if (!tmpDir.mkdirs()) {
+          LOG.error("Cannot create directories for '{}'", tmpDir.getAbsolutePath());
+        }
+        zkConfigManager.downloadConfigDir(solrPropsConfig.getConfigName(), Paths.get(downloadFolderLocation));
+        File[] listOfFiles = configSetFolder.listFiles();
+        if (listOfFiles != null) {
+          for (File file : listOfFiles) {
+            if (file.getName().equals(SOLR_CONFIG_FILE) && !FileUtils.contentEquals(file, new File(String.format("%s%s%s", downloadFolderLocation, separator, file.getName())))) {
+              LOG.info("Solr config file differs ('{}'), upload config set to zookeeper", file.getName());
+              zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
+              reloadCollectionNeeded = true;
+              break;
+            }
+            if (file.getName().equals(SCHEMA_FILE) && localSchemaFileHasMoreFields(file, new File(String.format("%s%s%s", downloadFolderLocation, separator, file.getName())))) {
+              LOG.info("Solr schema file differs ('{}'), upload config set to zookeeper", file.getName());
+              zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
+              reloadCollectionNeeded = true;
+              break;
+            }
+          }
+        }
+      } else {
+        LOG.info("Config set does not exist for '{}' collection. Uploading it to zookeeper...", solrPropsConfig.getCollection());
+        File[] listOfFiles = configSetFolder.listFiles();
+        if (listOfFiles != null) {
+          zkConfigManager.uploadConfigDir(configSetFolder.toPath(), solrPropsConfig.getConfigName());
+        }
+      }
+    } catch (Exception e) {
+      throw new RuntimeException(String.format("Cannot upload configurations to zk. (collection: %s, config set folder: %s)",
+        solrPropsConfig.getCollection(), solrPropsConfig.getConfigSetFolder()), e);
+    } finally {
+      if (tmpDir.exists()) {
+        try {
+          FileUtils.deleteDirectory(tmpDir);
+        } catch (IOException e){
+          LOG.error("Cannot delete temp directory.", e);
+        }
+      }
+    }
+    return reloadCollectionNeeded;
+  }
+
+  private boolean localSchemaFileHasMoreFields(File localFile, File downloadedFile) {
+    try {
+      XMLConfiguration localFileXml = new XMLConfiguration(localFile);
+      XMLConfiguration downloadedFileXml = new XMLConfiguration(downloadedFile);
+
+      List<String> localFieldNames = (ArrayList<String>) localFileXml.getProperty(FIELD_NAME_PATH);
+      List<String> localFieldTypes = (ArrayList<String>) localFileXml.getProperty(FIELD_TYPE_NAME_PATH);
+      List<String> localDynamicFields = (ArrayList<String>) localFileXml.getProperty(DYNAMIC_FIELD_NAME_PATH);
+
+      List<String> fieldNames = (ArrayList<String>) downloadedFileXml.getProperty(FIELD_NAME_PATH);
+      List<String> fieldTypes = (ArrayList<String>) downloadedFileXml.getProperty(FIELD_TYPE_NAME_PATH);
+      List<String> dynamicFields = (ArrayList<String>) downloadedFileXml.getProperty(DYNAMIC_FIELD_NAME_PATH);
+
+      boolean fieldNameHasDiff = hasMoreFields(localFieldNames, fieldNames, FIELD_NAME_PATH);
+      boolean fieldTypeHasDiff = hasMoreFields(localFieldTypes, fieldTypes, FIELD_TYPE_NAME_PATH);
+      boolean dynamicFieldNameHasDiff = hasMoreFields(localDynamicFields, dynamicFields, DYNAMIC_FIELD_NAME_PATH);
+
+      return fieldNameHasDiff || fieldTypeHasDiff || dynamicFieldNameHasDiff;
+    } catch (Exception e) {
+      throw new RuntimeException("Exception during schema xml parsing.", e);
+    }
+  }
+
+  private boolean hasMoreFields(List<String> localFields, List<String> fields, String tag) {
+    boolean result = false;
+    if (localFields != null) {
+      if (fields == null) {
+        result = true;
+      } else {
+        localFields.removeAll(fields);
+        if (!localFields.isEmpty()) {
+          result = true;
+        }
+      }
+    }
+    if (result) {
+      LOG.info("Found new fields or field types in local schema file: {} ({})", localFields.toString(), tag);
+    }
+    return result;
+  }
+
+}
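
Taken together with ReloadCollectionHandler above, the intended flow appears to be: upload the local config set to ZooKeeper when it has changed, then reload the collection so the new config takes effect. A minimal sketch under that assumption (the driver class and wiring are illustrative, not part of this patch):

import java.io.File;

import org.apache.ambari.logsearch.conf.SolrPropsConfig;
import org.apache.ambari.logsearch.handler.ReloadCollectionHandler;
import org.apache.ambari.logsearch.handler.UploadConfigurationHandler;
import org.apache.solr.client.solrj.impl.CloudSolrClient;

public class ConfigSetRefreshSketch {
  // Hypothetical: push the local config set and reload the collection only when the upload reports a change.
  public static void refreshConfigSet(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig, File configSetFolder) throws Exception {
    boolean reloadNeeded = new UploadConfigurationHandler(configSetFolder).handle(solrClient, solrPropsConfig);
    if (reloadNeeded) {
      new ReloadCollectionHandler().handle(solrClient, solrPropsConfig);
    }
  }
}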

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
new file mode 100644
index 0000000..49465d5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import freemarker.template.Configuration;
+import freemarker.template.Template;
+import freemarker.template.TemplateException;
+
+import org.apache.ambari.logsearch.common.LogType;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.StatusMessage;
+import org.apache.ambari.logsearch.dao.AuditSolrDao;
+import org.apache.ambari.logsearch.dao.SolrSchemaFieldDao;
+import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest;
+import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.AuditServiceLoadRequest;
+import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.UserExportRequest;
+import org.apache.ambari.logsearch.model.response.AuditLogResponse;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.GroupListResponse;
+import org.apache.ambari.logsearch.model.response.LogData;
+import org.apache.ambari.logsearch.solr.ResponseDataGenerator;
+import org.apache.ambari.logsearch.solr.SolrConstants;
+import org.apache.ambari.logsearch.solr.model.SolrAuditLogData;
+import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
+import org.apache.ambari.logsearch.util.DownloadUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.ambari.logsearch.common.VResponse;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.data.solr.core.query.SimpleFacetQuery;
+import org.springframework.data.solr.core.query.SimpleQuery;
+
+import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT;
+
+@Named
+public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResponse> {
+  private static final Logger logger = Logger.getLogger(AuditLogsManager.class);
+
+  private static final String AUDIT_LOG_TEMPLATE = "audit_log_txt.ftl";
+
+  @Inject
+  private AuditSolrDao auditSolrDao;
+  @Inject
+  private ResponseDataGenerator responseDataGenerator;
+  @Inject
+  private ConversionService conversionService;
+  @Inject
+  private Configuration freemarkerConfiguration;
+  @Inject
+  private SolrSchemaFieldDao solrSchemaFieldDao;
+
+  public AuditLogResponse getLogs(AuditLogRequest request) {
+    String event = "/audit/logs";
+    SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class);
+    if (request.isLastPage()) {
+      return getLastPage(auditSolrDao, solrQuery, event);
+    } else {
+      AuditLogResponse response = getLogAsPaginationProvided(solrQuery, auditSolrDao, event);
+      if (response.getTotalCount() > 0 && CollectionUtils.isEmpty(response.getLogList())) {
+        request.setLastPage(true);
+        solrQuery = conversionService.convert(request, SimpleQuery.class);
+        AuditLogResponse lastResponse = getLastPage(auditSolrDao, solrQuery, event);
+        if (lastResponse != null){
+          response = lastResponse;
+        }
+      }
+      return response;
+    }
+  }
+
+  private List<LogData> getComponents(AuditComponentRequest request) {
+    SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
+    List<LogData> docList = new ArrayList<>();
+    QueryResponse queryResponse = auditSolrDao.process(facetQuery);
+    List<Count> componentsCount = responseDataGenerator.generateCount(queryResponse);
+
+    for (Count component : componentsCount) {
+      SolrComponentTypeLogData logData = new SolrComponentTypeLogData();
+      logData.setType(component.getName());
+      docList.add(logData);
+    }
+    return docList;
+  }
+
+  public GroupListResponse getAuditComponents(AuditComponentRequest request) {
+    GroupListResponse componentResponse = new GroupListResponse();
+    List<LogData> docList = getComponents(request);
+    componentResponse.setGroupList(docList);
+    return componentResponse;
+  }
+
+  public BarGraphDataListResponse getAuditBarGraphData(AuditBarGraphRequest request) {
+    SolrQuery solrQuery = conversionService.convert(request, SolrQuery.class);
+    QueryResponse response = auditSolrDao.process(solrQuery);
+    return responseDataGenerator.generateBarGraphDataResponseWithRanges(response, SolrConstants.AuditLogConstants.AUDIT_COMPONENT, true);
+  }
+
+  public BarGraphDataListResponse topResources(FieldAuditLogRequest request) {
+    SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
+    QueryResponse queryResponse = auditSolrDao.process(facetQuery);
+    return responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 0);
+  }
+
+  public String getAuditLogsSchemaFieldsName() {
+    return convertObjToString(solrSchemaFieldDao.getSchemaFieldNameMap(LogType.AUDIT));
+  }
+
+  public BarGraphDataListResponse getServiceLoad(AuditServiceLoadRequest request) {
+    SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
+    QueryResponse response = auditSolrDao.process(facetQuery);
+    return responseDataGenerator.generateBarGraphFromFieldFacet(response, AUDIT_COMPONENT);
+  }
+
+  public Response export(UserExportRequest request) throws TemplateException {
+    String startTime = request.getFrom();
+    String endTime = request.getTo();
+    SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
+
+    startTime = startTime == null ? "" : startTime;
+    endTime = endTime == null ? "" : "_" + endTime;
+
+    String dataFormat = request.getFormat();
+
+    FileOutputStream fis = null;
+    try {
+      QueryResponse queryResponse = auditSolrDao.process(facetQuery);
+      if (queryResponse == null) {
+        VResponse response = new VResponse();
+        response.setMsgDesc("Query was not able to execute " + facetQuery);
+        throw RESTErrorUtil.createRESTException(response);
+      }
+      BarGraphDataListResponse vBarUserDataList = responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 0);
+      BarGraphDataListResponse vBarResourceDataList = responseDataGenerator.generateSecondLevelBarGraphDataResponse(queryResponse, 1);
+      String data = "";
+      if ("text".equals(dataFormat)) {
+        StringWriter stringWriter = new StringWriter();
+        Template template = freemarkerConfiguration.getTemplate(AUDIT_LOG_TEMPLATE);
+        Map<String, Object> models = new HashMap<>();
+        DownloadUtil.fillUserResourcesModel(models, vBarUserDataList, vBarResourceDataList);
+        template.process(models, stringWriter);
+        data = stringWriter.toString();
+
+      } else {
+        data = "{" + convertObjToString(vBarUserDataList) + "," + convertObjToString(vBarResourceDataList) + "}";
+        dataFormat = "json";
+      }
+      String fileName = "Users_Resource" + startTime + endTime + ".";
+      File file = File.createTempFile(fileName, dataFormat);
+      fis = new FileOutputStream(file);
+      fis.write(data.getBytes());
+      return Response
+        .ok(file, MediaType.APPLICATION_OCTET_STREAM)
+        .header("Content-Disposition", "attachment;filename=" + fileName + dataFormat)
+        .build();
+
+    } catch (IOException e) {
+      logger.error("Error during download file (audit log) " + e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    } finally {
+      if (fis != null) {
+        try {
+          fis.close();
+        } catch (IOException e) {
+        }
+      }
+    }
+  }
+
+  @Override
+  protected List<SolrAuditLogData> convertToSolrBeans(QueryResponse response) {
+    return response.getBeans(SolrAuditLogData.class);
+  }
+
+  @Override
+  protected AuditLogResponse createLogSearchResponse() {
+    return new AuditLogResponse();
+  }
+
+  public StatusMessage deleteLogs(AuditLogRequest request) {
+    SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class);
+    UpdateResponse updateResponse = auditSolrDao.deleteByQuery(solrQuery, "/audit/logs");
+    return new StatusMessage(updateResponse.getStatus());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
new file mode 100644
index 0000000..94191e0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.manager;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonPrimitive;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+
+import java.util.Date;
+
+public class JsonManagerBase {
+
+  private JsonSerializer<Date> jsonDateSerialiazer = null;
+  private JsonDeserializer<Date> jsonDateDeserialiazer = null;
+
+  public JsonManagerBase() {
+    jsonDateSerialiazer = new JsonSerializer<Date>() {
+
+      @Override
+      public JsonElement serialize(Date paramT, java.lang.reflect.Type paramType, JsonSerializationContext paramJsonSerializationContext) {
+        return paramT == null ? null : new JsonPrimitive(paramT.getTime());
+      }
+    };
+
+    jsonDateDeserialiazer = new JsonDeserializer<Date>() {
+
+      @Override
+      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT, JsonDeserializationContext context)
+        throws JsonParseException {
+        return json == null ? null : new Date(json.getAsLong());
+      }
+
+    };
+  }
+
+  protected String convertObjToString(Object obj) {
+    if (obj == null) {
+      return "";
+    }
+
+    Gson gson = new GsonBuilder()
+      .registerTypeAdapter(Date.class, jsonDateSerialiazer)
+      .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create();
+
+    return gson.toJson(obj);
+  }
+}
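
A small sketch of what the Gson setup above yields: the registered type adapters serialize Date fields as epoch milliseconds. The sample subclass and payload below are hypothetical, for illustration only:

import java.util.Date;

import org.apache.ambari.logsearch.manager.JsonManagerBase;

public class JsonManagerBaseUsageSketch extends JsonManagerBase {

  // Hypothetical payload: the Date field is rendered as a numeric timestamp, not an ISO string.
  static class Event {
    String name = "collection_created";
    Date createdAt = new Date(0L);
  }

  public static void main(String[] args) {
    // Prints something like: {"name":"collection_created","createdAt":0}
    System.out.println(new JsonManagerBaseUsageSketch().convertObjToString(new Event()));
  }
}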

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
new file mode 100644
index 0000000..6b40cb5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.ambari.logsearch.model.response.LogData;
+import org.apache.ambari.logsearch.model.response.LogSearchResponse;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocumentList;
+import org.springframework.data.solr.core.DefaultQueryParser;
+import org.springframework.data.solr.core.query.SimpleQuery;
+import org.springframework.data.solr.core.query.SolrDataQuery;
+
+public abstract class ManagerBase<LOG_DATA_TYPE extends LogData, SEARCH_RESPONSE extends LogSearchResponse> extends JsonManagerBase {
+  private static final Logger logger = Logger.getLogger(ManagerBase.class);
+
+  public ManagerBase() {
+    super();
+  }
+  
+  protected SEARCH_RESPONSE getLastPage(SolrDaoBase solrDaoBase, SimpleQuery lastPageQuery, String event) {
+    int maxRows = lastPageQuery.getRows();
+    SEARCH_RESPONSE logResponse = getLogAsPaginationProvided(lastPageQuery, solrDaoBase, event);
+    Long totalLogs = logResponse.getTotalCount();
+    int startIndex = (int)(totalLogs - totalLogs % maxRows);
+    int numberOfLogsOnLastPage = (int)(totalLogs - startIndex);
+    logResponse.setStartIndex(startIndex);
+    logResponse.setTotalCount(totalLogs);
+    logResponse.setPageSize(maxRows);
+    List<LOG_DATA_TYPE> docList = logResponse.getLogList();
+    List<LOG_DATA_TYPE> lastPageDocList = new ArrayList<>();
+    logResponse.setLogList(lastPageDocList);
+    int cnt = 0;
+    for (LOG_DATA_TYPE doc : docList) {
+      if (cnt < numberOfLogsOnLastPage) {
+        lastPageDocList.add(doc);
+      }
+      cnt++;
+    }
+    Collections.reverse(lastPageDocList);
+    return logResponse;
+  }
+
+  protected SEARCH_RESPONSE getLogAsPaginationProvided(SolrDataQuery solrQuery, SolrDaoBase solrDaoBase, String event) {
+    SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(solrQuery);
+    return getLogAsPaginationProvided(query, solrDaoBase, event);
+  }
+
+
+  protected SEARCH_RESPONSE getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase, String event) {
+    QueryResponse response = solrDaoBase.process(solrQuery, event);
+    SEARCH_RESPONSE logResponse = createLogSearchResponse();
+    SolrDocumentList docList = response.getResults();
+    logResponse.setTotalCount(docList.getNumFound());
+    List<LOG_DATA_TYPE> serviceLogDataList = convertToSolrBeans(response);
+    if (!docList.isEmpty()) {
+      logResponse.setLogList(serviceLogDataList);
+      logResponse.setStartIndex((int) docList.getStart());
+      Integer rowNumber = solrQuery.getRows();
+      if (rowNumber == null) {
+        logger.error("No RowNumber was set in solrQuery");
+        return createLogSearchResponse();
+      }
+      logResponse.setPageSize(rowNumber);
+    }
+    return logResponse;
+  }
+
+  protected abstract List<LOG_DATA_TYPE> convertToSolrBeans(QueryResponse response);
+
+  protected abstract SEARCH_RESPONSE createLogSearchResponse();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b0f1e340/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
new file mode 100644
index 0000000..cbab651
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logsearch.conf.AuthPropsConfig;
+import org.apache.ambari.logsearch.model.response.NameValueData;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+@Named
+public class PublicManager extends JsonManagerBase {
+
+  @Inject
+  private AuthPropsConfig authPropsConfig;
+
+  public String getGeneralConfig() {
+    NameValueDataListResponse nameValueList = new NameValueDataListResponse();
+    List<NameValueData> nameValues = new ArrayList<>();
+    NameValueData nameValue = new NameValueData();
+    nameValue.setName("simpleAuth");
+    nameValue.setValue("" + authPropsConfig.isAuthSimpleEnabled());
+    nameValues.add(nameValue);
+    nameValueList.setvNameValues(nameValues);
+    return convertObjToString(nameValueList);
+  }
+}
