AMBARI-16435 : Change Namenode Total File operations widget to request rate 
metric instead of actual metric. (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/347dc63e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/347dc63e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/347dc63e

Branch: refs/heads/trunk
Commit: 347dc63e0907dc43113a46b109cc1fd35ff943c4
Parents: 47c7b5e
Author: Aravindan Vijayan <avija...@hortonworks.com>
Authored: Tue May 10 13:18:57 2016 -0700
Committer: Aravindan Vijayan <avija...@hortonworks.com>
Committed: Tue May 10 14:32:03 2016 -0700

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog240.java       | 118 ++++++++++++++++++-
 .../stacks/HDP/2.3/services/HDFS/widgets.json   |   8 +-
 .../server/upgrade/UpgradeCatalog240Test.java   |  84 ++++++++++++-
 3 files changed, 201 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/347dc63e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 8c49ab4..4ed4a13 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -18,6 +18,9 @@
 
 package org.apache.ambari.server.upgrade;
 
+import java.io.File;
+import java.io.FileReader;
+import java.lang.reflect.Type;
 import java.sql.Clob;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -34,7 +37,10 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicLong;
 
+import com.google.common.reflect.TypeToken;
+import com.google.gson.Gson;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
@@ -46,6 +52,7 @@ import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
 import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
 import org.apache.ambari.server.orm.dao.RoleAuthorizationDAO;
 import org.apache.ambari.server.orm.dao.UserDAO;
+import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
@@ -56,6 +63,7 @@ import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.RoleAuthorizationEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
+import org.apache.ambari.server.orm.entities.WidgetEntity;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.state.AlertFirmness;
 import org.apache.ambari.server.state.Cluster;
@@ -63,7 +71,12 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.RepositoryType;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.stack.WidgetLayout;
+import org.apache.ambari.server.state.stack.WidgetLayoutInfo;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -242,7 +255,7 @@ public class UpgradeCatalog240 extends 
AbstractUpgradeCatalog {
     dbAccessor.addColumn(VIEWINSTANCE_TABLE,
       new DBColumnInfo(SHORT_URL_COLUMN, Long.class, null, null, true));
     dbAccessor.addFKConstraint(VIEWINSTANCE_TABLE, "FK_instance_url_id",
-            SHORT_URL_COLUMN, VIEWURL_TABLE, URL_ID_COLUMN, false);
+      SHORT_URL_COLUMN, VIEWURL_TABLE, URL_ID_COLUMN, false);
   }
 
   private void updateClusterTableDDL() throws SQLException {
@@ -277,6 +290,7 @@ public class UpgradeCatalog240 extends 
AbstractUpgradeCatalog {
     updateClusterInheritedPermissionsConfig();
     consolidateUserRoles();
     createRolePrincipals();
+    updateHDFSWidgetDefinition();
   }
 
   protected void updateClusterInheritedPermissionsConfig() throws SQLException 
{
@@ -1124,7 +1138,7 @@ public class UpgradeCatalog240 extends 
AbstractUpgradeCatalog {
    */
   protected void updateAlertTargetTable() throws SQLException {
     dbAccessor.addColumn(ALERT_TARGET_TABLE,
-        new DBColumnInfo(ALERT_TARGET_ENABLED_COLUMN, Short.class, null, 1, 
false));
+      new DBColumnInfo(ALERT_TARGET_ENABLED_COLUMN, Short.class, null, 1, 
false));
   }
 
   protected void setRoleSortOrder() throws SQLException {
@@ -1343,10 +1357,10 @@ public class UpgradeCatalog240 extends 
AbstractUpgradeCatalog {
     // ALTER TABLE servicecomponentdesiredstate ADD COLUMN
     // recovery_enabled SMALLINT DEFAULT 0 NOT NULL
     dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
-            new DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, 
false));
+      new DBColumnInfo(RECOVERY_ENABLED_COL, Short.class, null, 0, false));
 
     dbAccessor.addColumn(SERVICE_COMPONENT_DESIRED_STATE_TABLE,
-            new DBColumnInfo(DESIRED_VERSION_COLUMN_NAME, String.class, 255, 
State.UNKNOWN.toString(), false));
+      new DBColumnInfo(DESIRED_VERSION_COLUMN_NAME, String.class, 255, 
State.UNKNOWN.toString(), false));
   }
 
   /**
@@ -1861,4 +1875,100 @@ public class UpgradeCatalog240 extends 
AbstractUpgradeCatalog {
 
     return (cluster == null) ? "_unknown_" : cluster.getClusterName();
   }
+
+  protected void updateHDFSWidgetDefinition() throws AmbariException {
+    LOG.info("Updating HDFS widget definition.");
+
+    Map<String, List<String>> widgetMap = new HashMap<>();
+    Map<String, String> sectionLayoutMap = new HashMap<>();
+
+    List<String> hdfsSummaryWidgets = Collections.singletonList("NameNode 
Operations");
+    widgetMap.put("HDFS_SUMMARY", hdfsSummaryWidgets);
+    sectionLayoutMap.put("HDFS_SUMMARY", "default_hdfs_dashboard");
+
+    updateWidgetDefinitionsForService("HDFS", widgetMap, sectionLayoutMap);
+  }
+
+  private void updateWidgetDefinitionsForService(String serviceName, 
Map<String, List<String>> widgetMap,
+                                                 Map<String, String> 
sectionLayoutMap) throws AmbariException {
+    AmbariManagementController ambariManagementController = 
injector.getInstance(AmbariManagementController.class);
+    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    Type widgetLayoutType = new TypeToken<Map<String, 
List<WidgetLayout>>>(){}.getType();
+    Gson gson = injector.getInstance(Gson.class);
+    WidgetDAO widgetDAO = injector.getInstance(WidgetDAO.class);
+
+    Clusters clusters = ambariManagementController.getClusters();
+
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+    for (final Cluster cluster : clusterMap.values()) {
+      long clusterID = cluster.getClusterId();
+
+      StackId stackId = cluster.getDesiredStackVersion();
+      Map<String, Object> widgetDescriptor = null;
+      StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), 
stackId.getStackVersion());
+      ServiceInfo serviceInfo = stackInfo.getService(serviceName);
+      if (serviceInfo == null) {
+        LOG.info("Skipping updating widget definition, because " + serviceName 
+  " service is not present in cluster " +
+          "cluster_name= " + cluster.getClusterName());
+        continue;
+      }
+
+      for (String section : widgetMap.keySet()) {
+        List<String> widgets = widgetMap.get(section);
+        for (String widgetName : widgets) {
+          List<WidgetEntity> widgetEntities = widgetDAO.findByName(clusterID,
+            widgetName, "ambari", section);
+
+          if (widgetEntities != null && widgetEntities.size() > 0) {
+            WidgetEntity entityToUpdate = null;
+            if (widgetEntities.size() > 1) {
+              LOG.info("Found more than 1 entity with name = "+ widgetName +
+                " for cluster = " + cluster.getClusterName() + ", skipping 
update.");
+            } else {
+              entityToUpdate = widgetEntities.iterator().next();
+            }
+            if (entityToUpdate != null) {
+              LOG.info("Updating widget: " + entityToUpdate.getWidgetName());
+              // Get the definition from widgets.json file
+              WidgetLayoutInfo targetWidgetLayoutInfo = null;
+              File widgetDescriptorFile = 
serviceInfo.getWidgetsDescriptorFile();
+              if (widgetDescriptorFile != null && 
widgetDescriptorFile.exists()) {
+                try {
+                  widgetDescriptor = gson.fromJson(new 
FileReader(widgetDescriptorFile), widgetLayoutType);
+                } catch (Exception ex) {
+                  String msg = "Error loading widgets from file: " + 
widgetDescriptorFile;
+                  LOG.error(msg, ex);
+                  widgetDescriptor = null;
+                }
+              }
+              if (widgetDescriptor != null) {
+                LOG.debug("Loaded widget descriptor: " + widgetDescriptor);
+                for (Object artifact : widgetDescriptor.values()) {
+                  List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) 
artifact;
+                  for (WidgetLayout widgetLayout : widgetLayouts) {
+                    if 
(widgetLayout.getLayoutName().equals(sectionLayoutMap.get(section))) {
+                      for (WidgetLayoutInfo layoutInfo : 
widgetLayout.getWidgetLayoutInfoList()) {
+                        if (layoutInfo.getWidgetName().equals(widgetName)) {
+                          targetWidgetLayoutInfo = layoutInfo;
+                        }
+                      }
+                    }
+                  }
+                }
+              }
+              if (targetWidgetLayoutInfo != null) {
+                
entityToUpdate.setMetrics(gson.toJson(targetWidgetLayoutInfo.getMetricsInfo()));
+                
entityToUpdate.setWidgetValues(gson.toJson(targetWidgetLayoutInfo.getValues()));
+                
entityToUpdate.setDescription(targetWidgetLayoutInfo.getDescription());
+                widgetDAO.merge(entityToUpdate);
+              } else {
+                LOG.warn("Unable to find widget layout info for " + widgetName 
+
+                  " in the stack: " + stackId);
+              }
+            }
+          }
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/347dc63e/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
index 98c1f86..eeb9ff8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
@@ -275,13 +275,13 @@
         },
         {
           "widget_name": "NameNode Operations",
-          "description": "Total number of file operation over time.",
+          "description": "Rate per second of file operations over time.",
           "widget_type": "GRAPH",
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.namenode.TotalFileOps",
-              "metric_path": "metrics/dfs/namenode/TotalFileOps",
+              "name": "dfs.namenode.TotalFileOps._rate",
+              "metric_path": "metrics/dfs/namenode/TotalFileOps._rate",
               "service_name": "HDFS",
               "component_name": "NAMENODE",
               "host_component_criteria": 
"host_components/metrics/dfs/FSNamesystem/HAState=active"
@@ -290,7 +290,7 @@
           "values": [
             {
               "name": "NameNode File Operations",
-              "value": "${dfs.namenode.TotalFileOps}"
+              "value": "${dfs.namenode.TotalFileOps._rate}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/347dc63e/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 11f3865..78a910a 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -28,6 +28,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertNull;
 
+import java.io.File;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.sql.Connection;
@@ -61,6 +62,7 @@ import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.PrivilegeDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.dao.UserDAO;
+import org.apache.ambari.server.orm.dao.WidgetDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
@@ -69,14 +71,20 @@ import 
org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.UserEntity;
+import org.apache.ambari.server.orm.entities.WidgetEntity;
 import org.apache.ambari.server.security.authorization.ResourceType;
+import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.AlertFirmness;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.commons.io.FileUtils;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
@@ -93,6 +101,7 @@ import com.google.inject.Module;
 import com.google.inject.Provider;
 
 import junit.framework.Assert;
+import org.junit.rules.TemporaryFolder;
 
 public class UpgradeCatalog240Test {
   private static Injector injector;
@@ -416,7 +425,7 @@ public class UpgradeCatalog240Test {
     Method consolidateUserRoles = 
UpgradeCatalog240.class.getDeclaredMethod("consolidateUserRoles");
     Method updateClusterInheritedPermissionsConfig = 
UpgradeCatalog240.class.getDeclaredMethod("updateClusterInheritedPermissionsConfig");
     Method createRolePrincipals = 
UpgradeCatalog240.class.getDeclaredMethod("createRolePrincipals");
-
+    Method updateHDFSWidget = 
UpgradeCatalog240.class.getDeclaredMethod("updateHDFSWidgetDefinition");
 
     Capture<String> capturedStatements = newCapture(CaptureType.ALL);
 
@@ -441,6 +450,7 @@ public class UpgradeCatalog240Test {
             .addMockedMethod(consolidateUserRoles)
             .addMockedMethod(updateClusterInheritedPermissionsConfig)
             .addMockedMethod(createRolePrincipals)
+            .addMockedMethod(updateHDFSWidget)
             .createMock();
 
     Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
@@ -463,6 +473,7 @@ public class UpgradeCatalog240Test {
     upgradeCatalog240.consolidateUserRoles();
     upgradeCatalog240.createRolePrincipals();
     upgradeCatalog240.updateClusterInheritedPermissionsConfig();
+    upgradeCatalog240.updateHDFSWidgetDefinition();
 
     replay(upgradeCatalog240, dbAccessor);
 
@@ -1373,5 +1384,76 @@ public class UpgradeCatalog240Test {
 
 
   }
+
+  @Test
+  public void testHDFSWidgetUpdate() throws Exception {
+    final Clusters clusters = createNiceMock(Clusters.class);
+    final Cluster cluster = createNiceMock(Cluster.class);
+    final AmbariManagementController controller = 
createNiceMock(AmbariManagementController.class);
+    final Gson gson = new Gson();
+    final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
+    final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
+    WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
+    StackId stackId = new StackId("HDP", "2.0.0");
+    StackInfo stackInfo = createNiceMock(StackInfo.class);
+    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
+
+    String widgetStr = "{\n" +
+      "  \"layouts\": [\n" +
+      "    {\n" +
+      "      \"layout_name\": \"default_hdfs_dashboard\",\n" +
+      "      \"display_name\": \"Standard HDFS Dashboard\",\n" +
+      "      \"section_name\": \"HDFS_SUMMARY\",\n" +
+      "      \"widgetLayoutInfo\": [\n" +
+      "        {\n" +
+      "          \"widget_name\": \"NameNode Operations\",\n" +
+      "          \"metrics\": [],\n" +
+      "          \"values\": []\n" +
+      "        }\n" +
+      "      ]\n" +
+      "    }\n" +
+      "  ]\n" +
+      "}";
+
+    TemporaryFolder temporaryFolder = new TemporaryFolder();
+    File dataDirectory = temporaryFolder.newFolder();
+    File file = new File(dataDirectory, "hdfs_widget.json");
+    FileUtils.writeStringToFile(file, widgetStr);
+
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        
bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+        bind(AmbariManagementController.class).toInstance(controller);
+        bind(Clusters.class).toInstance(clusters);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        bind(Gson.class).toInstance(gson);
+        bind(WidgetDAO.class).toInstance(widgetDAO);
+        
bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+        bind(AmbariMetaInfo.class).toInstance(metaInfo);
+      }
+    });
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).anyTimes();
+    expect(cluster.getClusterId()).andReturn(1L).anyTimes();
+    expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
+    expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+    expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
+    expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
+
+    expect(widgetDAO.findByName(1L, "NameNode Operations", "ambari", 
"HDFS_SUMMARY"))
+      .andReturn(Collections.singletonList(widgetEntity));
+    expect(widgetDAO.merge(widgetEntity)).andReturn(null);
+    expect(widgetEntity.getWidgetName()).andReturn("NameNode Operations").anyTimes();
+
+    replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, 
stackInfo, serviceInfo);
+
+    
mockInjector.getInstance(UpgradeCatalog240.class).updateHDFSWidgetDefinition();
+
+    verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, 
serviceInfo);
+  }
 }
 

Reply via email to