This is an automated email from the ASF dual-hosted git repository.
szetszwo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new e461c699dd HDDS-10801. Replace GSON with Jackson in hadoop-ozone
classes. (#6651)
e461c699dd is described below
commit e461c699dd333fa50662f31a61cb990d9b68a298
Author: Arafat2198 <[email protected]>
AuthorDate: Sun May 12 22:43:57 2024 +0530
HDDS-10801. Replace GSON with Jackson in hadoop-ozone classes. (#6651)
---
.../org/apache/hadoop/hdds/server/JsonUtils.java | 14 +-
.../java/org/apache/hadoop/hdds/JsonTestUtils.java | 24 ++
.../DeletedBlocksTransactionInfoWrapper.java | 9 +-
.../ozone/recon/TestReconWithOzoneManager.java | 10 +-
.../hadoop/ozone/shell/TestOzoneShellHA.java | 26 +-
.../ozone/recon/heatmap/TestHeatMapInfo.java | 440 ++++++++++-----------
.../scm/ResetDeletedBlockRetryCountSubcommand.java | 15 +-
7 files changed, 281 insertions(+), 257 deletions(-)
diff --git
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/server/JsonUtils.java
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/server/JsonUtils.java
index 1a5c7f8c3b..f6894b17e3 100644
---
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/server/JsonUtils.java
+++
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/server/JsonUtils.java
@@ -20,12 +20,11 @@ package org.apache.hadoop.hdds.server;
import java.io.File;
import java.io.IOException;
-import java.util.HashMap;
+import java.io.Reader;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
@@ -103,14 +102,13 @@ public final class JsonUtils {
return MAPPER.readTree(content);
}
- public static List<HashMap<String, Object>> readTreeAsListOfMaps(String json)
- throws IOException {
- return MAPPER.readValue(json,
- new TypeReference<List<HashMap<String, Object>>>() {
- });
+ /**
+ * Reads JSON content from a Reader and deserializes it into a Java object.
+ */
+ public static <T> T readFromReader(Reader reader, Class<T> valueType) throws
IOException {
+ return MAPPER.readValue(reader, valueType);
}
-
/**
* Utility to sequentially write a large collection of items to a file.
*/
diff --git
a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/JsonTestUtils.java
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/JsonTestUtils.java
index dbef73c4cc..ac19c30a4a 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/JsonTestUtils.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/JsonTestUtils.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdds;
import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
@@ -27,6 +28,8 @@ import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.io.IOException;
+import java.util.List;
+import java.util.Map;
/**
* JSON Utility functions used in ozone for Test classes.
@@ -67,4 +70,25 @@ public final class JsonTestUtils {
public static JsonNode readTree(String content) throws IOException {
return MAPPER.readTree(content);
}
+
+ public static List<Map<String, Object>> readTreeAsListOfMaps(String json)
+ throws IOException {
+ return MAPPER.readValue(json,
+ new TypeReference<List<Map<String, Object>>>() {
+ });
+ }
+
+ /**
+ * Converts a JsonNode into a Java object of the specified type.
+ * @param node The JsonNode to convert.
+ * @param valueType The target class of the Java object.
+ * @param <T> The type of the Java object.
+ * @return A Java object of type T, populated with data from the JsonNode.
+   * @throws IOException if the JsonNode cannot be converted to the target type.
+ */
+ public static <T> T treeToValue(JsonNode node, Class<T> valueType)
+ throws IOException {
+ return MAPPER.treeToValue(node, valueType);
+ }
+
}
diff --git
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeletedBlocksTransactionInfoWrapper.java
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeletedBlocksTransactionInfoWrapper.java
index 64ced8dce4..be5c9a03e0 100644
---
a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeletedBlocksTransactionInfoWrapper.java
+++
b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/container/common/helpers/DeletedBlocksTransactionInfoWrapper.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdds.scm.container.common.helpers;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
import
org.apache.hadoop.hdds.protocol.proto.HddsProtos.DeletedBlocksTransactionInfo;
import
org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
import java.util.List;
@@ -31,8 +33,11 @@ public class DeletedBlocksTransactionInfoWrapper {
private final List<Long> localIdList;
private final int count;
- public DeletedBlocksTransactionInfoWrapper(long txID, long containerID,
- List<Long> localIdList, int count) {
+ @JsonCreator
+ public DeletedBlocksTransactionInfoWrapper(@JsonProperty("txID") long txID,
+ @JsonProperty("containerID") long
containerID,
+ @JsonProperty("localIdList")
List<Long> localIdList,
+ @JsonProperty("count") int count)
{
this.txID = txID;
this.containerID = containerID;
this.localIdList = localIdList;
diff --git
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/recon/TestReconWithOzoneManager.java
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/recon/TestReconWithOzoneManager.java
index d52b0e99b2..f51d12a7c5 100644
---
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/recon/TestReconWithOzoneManager.java
+++
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/recon/TestReconWithOzoneManager.java
@@ -38,16 +38,16 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
-import java.util.HashMap;
+import java.util.Map;
import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hdds.JsonTestUtils;
import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.client.StandaloneReplicationConfig;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.HddsTestUtils;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
-import org.apache.hadoop.hdds.server.JsonUtils;
import org.apache.hadoop.hdds.utils.db.RDBStore;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.TableIterator;
@@ -383,11 +383,11 @@ public class TestReconWithOzoneManager {
String taskName,
String entityAttribute)
throws IOException {
- List<HashMap<String, Object>> taskStatusList =
- JsonUtils.readTreeAsListOfMaps(taskStatusResponse);
+ List<Map<String, Object>> taskStatusList =
+ JsonTestUtils.readTreeAsListOfMaps(taskStatusResponse);
// Stream through the list to find the task entity matching the taskName
- Optional<HashMap<String, Object>> taskEntity = taskStatusList.stream()
+ Optional<Map<String, Object>> taskEntity = taskStatusList.stream()
.filter(task -> taskName.equals(task.get("taskName")))
.findFirst();
diff --git
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneShellHA.java
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneShellHA.java
index f86fd46946..4dd17a4030 100644
---
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneShellHA.java
+++
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/shell/TestOzoneShellHA.java
@@ -24,7 +24,7 @@ import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
-import java.util.ArrayList;
+import java.util.Map;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@@ -68,10 +68,9 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.ozone.test.GenericTestUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.ozone.om.TrashPolicyOzone;
+import org.apache.hadoop.hdds.JsonTestUtils;
import com.google.common.base.Strings;
-import com.google.gson.Gson;
-import com.google.gson.internal.LinkedTreeMap;
import static java.nio.charset.StandardCharsets.UTF_8;
import static
org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
@@ -409,12 +408,11 @@ public class TestOzoneShellHA {
}
/**
- * Parse output into ArrayList with Gson.
+   * Parse output into a List of Maps with Jackson.
* @return ArrayList
*/
- private ArrayList<LinkedTreeMap<String, String>> parseOutputIntoArrayList()
- throws UnsupportedEncodingException {
- return new Gson().fromJson(out.toString(DEFAULT_ENCODING),
ArrayList.class);
+ private List<Map<String, Object>> parseOutputIntoArrayList() throws
IOException {
+ return JsonTestUtils.readTreeAsListOfMaps(out.toString(DEFAULT_ENCODING));
}
@Test
@@ -488,7 +486,7 @@ public class TestOzoneShellHA {
* Test ozone shell list command.
*/
@Test
- public void testOzoneShCmdList() throws UnsupportedEncodingException {
+ public void testOzoneShCmdList() throws IOException {
// Part of listing keys test.
generateKeys("/volume4", "/bucket", "");
final String destinationBucket = "o3://" + omServiceId + "/volume4/bucket";
@@ -1671,7 +1669,7 @@ public class TestOzoneShellHA {
}
public void testListVolumeBucketKeyShouldPrintValidJsonArray()
- throws UnsupportedEncodingException {
+ throws IOException {
final List<String> testVolumes =
Arrays.asList("jsontest-vol1", "jsontest-vol2", "jsontest-vol3");
@@ -1696,7 +1694,7 @@ public class TestOzoneShellHA {
execute(ozoneShell, new String[] {"volume", "list"});
// Expect valid JSON array
- final ArrayList<LinkedTreeMap<String, String>> volumeListOut =
+ final List<Map<String, Object>> volumeListOut =
parseOutputIntoArrayList();
// Can include s3v and volumes from other test cases that aren't cleaned
up,
// hence >= instead of equals.
@@ -1711,7 +1709,7 @@ public class TestOzoneShellHA {
execute(ozoneShell, new String[] {"bucket", "list", firstVolumePrefix});
// Expect valid JSON array as well
- final ArrayList<LinkedTreeMap<String, String>> bucketListOut =
+ final List<Map<String, Object>> bucketListOut =
parseOutputIntoArrayList();
assertEquals(testBuckets.size(), bucketListOut.size());
final HashSet<String> bucketSet = new HashSet<>(testBuckets);
@@ -1724,7 +1722,7 @@ public class TestOzoneShellHA {
execute(ozoneShell, new String[] {"key", "list", keyPathPrefix});
// Expect valid JSON array as well
- final ArrayList<LinkedTreeMap<String, String>> keyListOut =
+ final List<Map<String, Object>> keyListOut =
parseOutputIntoArrayList();
assertEquals(testKeys.size(), keyListOut.size());
final HashSet<String> keySet = new HashSet<>(testKeys);
@@ -1977,7 +1975,7 @@ public class TestOzoneShellHA {
execute(ozoneShell, new String[] {"bucket", "list", "/volume1"});
// Expect valid JSON array
- final ArrayList<LinkedTreeMap<String, String>> bucketListOut =
+ final List<Map<String, Object>> bucketListOut =
parseOutputIntoArrayList();
assertEquals(1, bucketListOut.size());
@@ -1996,7 +1994,7 @@ public class TestOzoneShellHA {
execute(ozoneShell, new String[] {"bucket", "list", "/volume1"});
// Expect valid JSON array
- final ArrayList<LinkedTreeMap<String, String>> bucketListLinked =
+ final List<Map<String, Object>> bucketListLinked =
parseOutputIntoArrayList();
assertEquals(2, bucketListLinked.size());
diff --git
a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/heatmap/TestHeatMapInfo.java
b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/heatmap/TestHeatMapInfo.java
index 856b556e8a..40d7cbd477 100644
---
a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/heatmap/TestHeatMapInfo.java
+++
b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/heatmap/TestHeatMapInfo.java
@@ -18,11 +18,10 @@
package org.apache.hadoop.ozone.recon.heatmap;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import org.apache.hadoop.hdds.JsonTestUtils;
import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
+import org.apache.hadoop.hdds.server.JsonUtils;
import org.apache.hadoop.ozone.recon.ReconTestInjector;
import org.apache.hadoop.ozone.recon.api.types.EntityMetaData;
import org.apache.hadoop.ozone.recon.api.types.EntityReadAccessHeatMapResponse;
@@ -745,35 +744,39 @@ public class TestHeatMapInfo {
public void testHeatMapGeneratedInfo() throws IOException {
// Setup
// Run the test
- JsonElement jsonElement = JsonParser.parseString(auditRespStr);
- JsonObject jsonObject = jsonElement.getAsJsonObject();
- JsonElement facets = jsonObject.get("facets");
- JsonObject facetsBucketsObject =
- facets.getAsJsonObject().get("resources")
- .getAsJsonObject();
- ObjectMapper objectMapper = new ObjectMapper();
+ // Parse the JSON string to JsonNode
+ JsonNode rootNode = JsonUtils.readTree(auditRespStr);
+ JsonNode facetsNode = rootNode.path("facets");
+ JsonNode resourcesNode = facetsNode.path("resources");
+
+ // Deserialize the resources node directly if it's not missing
HeatMapProviderDataResource auditLogFacetsResources =
- objectMapper.readValue(
- facetsBucketsObject.toString(), HeatMapProviderDataResource.class);
- EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
- List<EntityMetaData> entityMetaDataList =
- Arrays.stream(entities).collect(Collectors.toList());
- EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
- heatMapUtil.generateHeatMap(entityMetaDataList);
-
assertThat(entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(0);
- assertEquals(12, entityReadAccessHeatMapResponse.getChildren().size());
- assertEquals(25600, entityReadAccessHeatMapResponse.getSize());
- assertEquals(2924, entityReadAccessHeatMapResponse.getMinAccessCount());
- assertEquals(155074, entityReadAccessHeatMapResponse.getMaxAccessCount());
- assertEquals("root", entityReadAccessHeatMapResponse.getLabel());
- assertEquals(0.0,
entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
- assertEquals(0.442,
- entityReadAccessHeatMapResponse.getChildren().get(0).getChildren()
- .get(0).getChildren().get(1).getColor());
- assertEquals(0.058,
- entityReadAccessHeatMapResponse.getChildren().get(0).getChildren()
- .get(1).getChildren().get(3).getColor());
+ JsonTestUtils.treeToValue(resourcesNode,
HeatMapProviderDataResource.class);
+
+ if (auditLogFacetsResources != null) {
+ EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
+ List<EntityMetaData> entityMetaDataList =
+ Arrays.stream(entities).collect(Collectors.toList());
+ EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
+ heatMapUtil.generateHeatMap(entityMetaDataList);
+ assertThat(
+ entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(
+ 0);
+ assertEquals(12, entityReadAccessHeatMapResponse.getChildren().size());
+ assertEquals(25600, entityReadAccessHeatMapResponse.getSize());
+ assertEquals(2924, entityReadAccessHeatMapResponse.getMinAccessCount());
+ assertEquals(155074,
entityReadAccessHeatMapResponse.getMaxAccessCount());
+ assertEquals("root", entityReadAccessHeatMapResponse.getLabel());
+ assertEquals(0.0,
+ entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
+ assertEquals(0.442,
+ entityReadAccessHeatMapResponse.getChildren().get(0).getChildren()
+ .get(0).getChildren().get(1).getColor());
+ assertEquals(0.058,
+ entityReadAccessHeatMapResponse.getChildren().get(0).getChildren()
+ .get(1).getChildren().get(3).getColor());
+ }
}
@Test
@@ -831,54 +834,51 @@ public class TestHeatMapInfo {
" }\n" +
" }\n" +
"}";
- JsonElement jsonElement =
- JsonParser.parseString(auditRespStrWithVolumeEntityType);
- JsonObject jsonObject = jsonElement.getAsJsonObject();
- JsonElement facets = jsonObject.get("facets");
- JsonElement resources = facets.getAsJsonObject().get("resources");
- JsonObject facetsBucketsObject = new JsonObject();
- if (null != resources) {
- facetsBucketsObject = resources.getAsJsonObject();
- }
- ObjectMapper objectMapper = new ObjectMapper();
+ JsonNode rootNode = JsonUtils.readTree(auditRespStrWithVolumeEntityType);
+ JsonNode facetsNode = rootNode.path("facets");
+ JsonNode resourcesNode = facetsNode.path("resources");
+
+ // Deserialize the resources node directly if it's not missing
HeatMapProviderDataResource auditLogFacetsResources =
- objectMapper.readValue(
- facetsBucketsObject.toString(), HeatMapProviderDataResource.class);
- EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
- if (null != entities && entities.length > 0) {
- List<EntityMetaData> entityMetaDataList =
- Arrays.stream(entities).collect(Collectors.toList());
- // Below heatmap response would be of format like:
- //{
- // "label": "root",
- // "path": "/",
- // "children": [
- // {
- // "label": "s3v",
- // "path": "s3v",
- // "size": 256
- // },
- // {
- // "label": "testnewvol2",
- // "path": "testnewvol2",
- // "size": 256
- // }
- // ],
- // "size": 512,
- // "minAccessCount": 19263
- //}
- EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
- heatMapUtil.generateHeatMap(entityMetaDataList);
-
assertThat(entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(0);
- assertEquals(2, entityReadAccessHeatMapResponse.getChildren().size());
- assertEquals(512, entityReadAccessHeatMapResponse.getSize());
- assertEquals(8590, entityReadAccessHeatMapResponse.getMinAccessCount());
- assertEquals(19263, entityReadAccessHeatMapResponse.getMaxAccessCount());
- assertEquals(1.0,
entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
- assertEquals("root", entityReadAccessHeatMapResponse.getLabel());
- } else {
- assertNull(entities);
+ JsonTestUtils.treeToValue(resourcesNode,
HeatMapProviderDataResource.class);
+
+ if (auditLogFacetsResources != null) {
+ EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
+ if (null != entities && entities.length > 0) {
+ List<EntityMetaData> entityMetaDataList =
+ Arrays.stream(entities).collect(Collectors.toList());
+ // Below heatmap response would be of format like:
+ //{
+ // "label": "root",
+ // "path": "/",
+ // "children": [
+ // {
+ // "label": "s3v",
+ // "path": "s3v",
+ // "size": 256
+ // },
+ // {
+ // "label": "testnewvol2",
+ // "path": "testnewvol2",
+ // "size": 256
+ // }
+ // ],
+ // "size": 512,
+ // "minAccessCount": 19263
+ //}
+ EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
+ heatMapUtil.generateHeatMap(entityMetaDataList);
+
assertThat(entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(0);
+ assertEquals(2, entityReadAccessHeatMapResponse.getChildren().size());
+ assertEquals(512, entityReadAccessHeatMapResponse.getSize());
+ assertEquals(8590,
entityReadAccessHeatMapResponse.getMinAccessCount());
+ assertEquals(19263,
entityReadAccessHeatMapResponse.getMaxAccessCount());
+ assertEquals(1.0,
entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
+ assertEquals("root", entityReadAccessHeatMapResponse.getLabel());
+ } else {
+ assertNull(entities);
+ }
}
}
@@ -965,150 +965,150 @@ public class TestHeatMapInfo {
" }\n" +
" }\n" +
"}";
- JsonElement jsonElement =
- JsonParser.parseString(auditRespStrWithPathAndBucketEntityType);
- JsonObject jsonObject = jsonElement.getAsJsonObject();
- JsonElement facets = jsonObject.get("facets");
- JsonElement resources = facets.getAsJsonObject().get("resources");
- JsonObject facetsBucketsObject = new JsonObject();
- if (null != resources) {
- facetsBucketsObject = resources.getAsJsonObject();
- }
- ObjectMapper objectMapper = new ObjectMapper();
- HeatMapProviderDataResource auditLogFacetsResources =
- objectMapper.readValue(
- facetsBucketsObject.toString(), HeatMapProviderDataResource.class);
- EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
- if (null != entities && entities.length > 0) {
- List<EntityMetaData> entityMetaDataList =
- Arrays.stream(entities).collect(Collectors.toList());
- // Below heatmap response would be of format like:
- //{
- // "label": "root",
- // "path": "/",
- // "children": [
- // {
- // "label": "testnewvol2",
- // "path": "testnewvol2",
- // "children": [
- // {
- // "label": "fsobuck11",
- // "path": "/testnewvol2/fsobuck11",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/fsobuck11/",
- // "size": 100,
- // "accessCount": 701,
- // "color": 1.0
- // }
- // ],
- // "size": 100,
- // "minAccessCount": 701,
- // "maxAccessCount": 701
- // },
- // {
- // "label": "fsobuck12",
- // "path": "/testnewvol2/fsobuck12",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/fsobuck12/",
- // "size": 100,
- // "accessCount": 701,
- // "color": 1.0
- // }
- // ],
- // "size": 100,
- // "minAccessCount": 701,
- // "maxAccessCount": 701
- // },
- // {
- // "label": "fsobuck13",
- // "path": "/testnewvol2/fsobuck13",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/fsobuck13/",
- // "size": 100,
- // "accessCount": 701,
- // "color": 1.0
- // }
- // ],
- // "size": 100,
- // "minAccessCount": 701,
- // "maxAccessCount": 701
- // },
- // {
- // "label": "obsbuck11",
- // "path": "/testnewvol2/obsbuck11",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/obsbuck11/",
- // "size": 107,
- // "accessCount": 263,
- // "color": 1.0
- // }
- // ],
- // "size": 107,
- // "minAccessCount": 263,
- // "maxAccessCount": 263
- // },
- // {
- // "label": "obsbuck12",
- // "path": "/testnewvol2/obsbuck12",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/obsbuck12/",
- // "size": 100,
- // "accessCount": 200,
- // "color": 1.0
- // }
- // ],
- // "size": 100,
- // "minAccessCount": 200,
- // "maxAccessCount": 200
- // },
- // {
- // "label": "obsbuck13",
- // "path": "/testnewvol2/obsbuck13",
- // "children": [
- // {
- // "label": "",
- // "path": "/testnewvol2/obsbuck13/",
- // "size": 100,
- // "accessCount": 200,
- // "color": 1.0
- // }
- // ],
- // "size": 100,
- // "minAccessCount": 200,
- // "maxAccessCount": 200
- // }
- // ],
- // "size": 607
- // }
- // ],
- // "size": 607,
- // "minAccessCount": 200,
- // "maxAccessCount": 701
- //}
- EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
- heatMapUtil.generateHeatMap(entityMetaDataList);
-
assertThat(entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(0);
- assertEquals(2,
- entityReadAccessHeatMapResponse.getChildren().size());
- assertEquals(0.0,
- entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
- String path =
- entityReadAccessHeatMapResponse.getChildren().get(1).getChildren()
- .get(0).getPath();
- assertEquals("/testnewvol2/fsobuck11", path);
- } else {
- assertNull(entities);
+ JsonNode rootNode =
JsonUtils.readTree(auditRespStrWithPathAndBucketEntityType);
+ // Navigate to the nested JSON objects
+ JsonNode facetsNode = rootNode.path("facets");
+ JsonNode resourcesNode = facetsNode.path("resources");
+ // Deserialize the resources node directly if it's not missing
+ HeatMapProviderDataResource auditLogFacetsResources = null;
+ auditLogFacetsResources =
+ JsonTestUtils.treeToValue(resourcesNode,
HeatMapProviderDataResource.class);
+
+ if (auditLogFacetsResources != null) {
+ EntityMetaData[] entities = auditLogFacetsResources.getMetaDataList();
+ if (null != entities && entities.length > 0) {
+ List<EntityMetaData> entityMetaDataList =
+ Arrays.stream(entities).collect(Collectors.toList());
+ // Below heatmap response would be of format like:
+ //{
+ // "label": "root",
+ // "path": "/",
+ // "children": [
+ // {
+ // "label": "testnewvol2",
+ // "path": "testnewvol2",
+ // "children": [
+ // {
+ // "label": "fsobuck11",
+ // "path": "/testnewvol2/fsobuck11",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/fsobuck11/",
+ // "size": 100,
+ // "accessCount": 701,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 100,
+ // "minAccessCount": 701,
+ // "maxAccessCount": 701
+ // },
+ // {
+ // "label": "fsobuck12",
+ // "path": "/testnewvol2/fsobuck12",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/fsobuck12/",
+ // "size": 100,
+ // "accessCount": 701,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 100,
+ // "minAccessCount": 701,
+ // "maxAccessCount": 701
+ // },
+ // {
+ // "label": "fsobuck13",
+ // "path": "/testnewvol2/fsobuck13",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/fsobuck13/",
+ // "size": 100,
+ // "accessCount": 701,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 100,
+ // "minAccessCount": 701,
+ // "maxAccessCount": 701
+ // },
+ // {
+ // "label": "obsbuck11",
+ // "path": "/testnewvol2/obsbuck11",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/obsbuck11/",
+ // "size": 107,
+ // "accessCount": 263,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 107,
+ // "minAccessCount": 263,
+ // "maxAccessCount": 263
+ // },
+ // {
+ // "label": "obsbuck12",
+ // "path": "/testnewvol2/obsbuck12",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/obsbuck12/",
+ // "size": 100,
+ // "accessCount": 200,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 100,
+ // "minAccessCount": 200,
+ // "maxAccessCount": 200
+ // },
+ // {
+ // "label": "obsbuck13",
+ // "path": "/testnewvol2/obsbuck13",
+ // "children": [
+ // {
+ // "label": "",
+ // "path": "/testnewvol2/obsbuck13/",
+ // "size": 100,
+ // "accessCount": 200,
+ // "color": 1.0
+ // }
+ // ],
+ // "size": 100,
+ // "minAccessCount": 200,
+ // "maxAccessCount": 200
+ // }
+ // ],
+ // "size": 607
+ // }
+ // ],
+ // "size": 607,
+ // "minAccessCount": 200,
+ // "maxAccessCount": 701
+ //}
+ EntityReadAccessHeatMapResponse entityReadAccessHeatMapResponse =
+ heatMapUtil.generateHeatMap(entityMetaDataList);
+ assertThat(
+
entityReadAccessHeatMapResponse.getChildren().size()).isGreaterThan(
+ 0);
+ assertEquals(2,
+ entityReadAccessHeatMapResponse.getChildren().size());
+ assertEquals(0.0,
+ entityReadAccessHeatMapResponse.getChildren().get(0).getColor());
+ String path =
+ entityReadAccessHeatMapResponse.getChildren().get(1).getChildren()
+ .get(0).getPath();
+ assertEquals("/testnewvol2/fsobuck11", path);
+ } else {
+ assertNull(entities);
+ }
}
}
}
diff --git
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
index 47a0ec2299..8123d5358a 100644
---
a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
+++
b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ResetDeletedBlockRetryCountSubcommand.java
@@ -16,13 +16,11 @@
*/
package org.apache.hadoop.ozone.admin.scm;
-import com.google.gson.Gson;
-import com.google.gson.JsonIOException;
-import com.google.gson.JsonSyntaxException;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
import org.apache.hadoop.hdds.scm.client.ScmClient;
import
org.apache.hadoop.hdds.scm.container.common.helpers.DeletedBlocksTransactionInfoWrapper;
+import org.apache.hadoop.hdds.server.JsonUtils;
import picocli.CommandLine;
import java.io.FileInputStream;
@@ -74,12 +72,11 @@ public class ResetDeletedBlockRetryCountSubcommand extends
ScmSubcommand {
if (group.resetAll) {
count = client.resetDeletedBlockRetryCount(new ArrayList<>());
} else if (group.fileName != null) {
- Gson gson = new Gson();
List<Long> txIDs;
try (InputStream in = new FileInputStream(group.fileName);
Reader fileReader = new InputStreamReader(in,
StandardCharsets.UTF_8)) {
- DeletedBlocksTransactionInfoWrapper[] txns = gson.fromJson(fileReader,
+ DeletedBlocksTransactionInfoWrapper[] txns =
JsonUtils.readFromReader(fileReader,
DeletedBlocksTransactionInfoWrapper[].class);
txIDs = Arrays.stream(txns)
.map(DeletedBlocksTransactionInfoWrapper::getTxID)
@@ -92,10 +89,12 @@ public class ResetDeletedBlockRetryCountSubcommand extends
ScmSubcommand {
System.out.println("The last loaded txID: " +
txIDs.get(txIDs.size() - 1));
}
- } catch (JsonIOException | JsonSyntaxException | IOException ex) {
- System.out.println("Cannot parse the file " + group.fileName);
- throw new IOException(ex);
+ } catch (IOException ex) {
+ final String message = "Failed to parse the file " + group.fileName +
": " + ex.getMessage();
+ System.out.println(message);
+ throw new IOException(message, ex);
}
+
count = client.resetDeletedBlockRetryCount(txIDs);
} else {
if (group.txList == null || group.txList.isEmpty()) {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]