This is an automated email from the ASF dual-hosted git repository.
sarvekshayr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ozone.git
The following commit(s) were added to refs/heads/master by this push:
new c5f1dd35b53 HDDS-14857. Use XMLUtils.newSecure...Factory (#9951)
c5f1dd35b53 is described below
commit c5f1dd35b531ac8fcb62a35be9e693933b16f776
Author: Russole <[email protected]>
AuthorDate: Fri Mar 20 20:46:11 2026 +0800
HDDS-14857. Use XMLUtils.newSecure...Factory (#9951)
---
hadoop-hdds/config/pom.xml | 10 ++++++++++
.../java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java | 8 +++-----
.../java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java | 5 ++---
.../java/org/apache/hadoop/hdds/utils/HttpServletUtils.java | 2 +-
.../org/apache/hadoop/hdds/scm/net/TestNodeSchemaLoader.java | 2 +-
.../apache/hadoop/ozone/s3/endpoint/MessageUnmarshaller.java | 5 ++---
6 files changed, 19 insertions(+), 13 deletions(-)
diff --git a/hadoop-hdds/config/pom.xml b/hadoop-hdds/config/pom.xml
index dca25492fff..1aa00a4dc45 100644
--- a/hadoop-hdds/config/pom.xml
+++ b/hadoop-hdds/config/pom.xml
@@ -26,6 +26,16 @@
<description>Apache Ozone Distributed Data Store Config Tools</description>
<dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>*</groupId>
+ <artifactId>*</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
diff --git a/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java b/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
index 736c6e25028..8d621e2281a 100644
--- a/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
+++ b/hadoop-hdds/config/src/main/java/org/apache/hadoop/hdds/conf/ConfigFileAppender.java
@@ -22,7 +22,6 @@
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.stream.Collectors;
-import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
@@ -31,6 +30,7 @@
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
+import org.apache.hadoop.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
@@ -48,8 +48,7 @@ public class ConfigFileAppender {
public ConfigFileAppender() {
try {
- DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
- factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
+ DocumentBuilderFactory factory = XMLUtils.newSecureDocumentBuilderFactory();
builder = factory.newDocumentBuilder();
} catch (Exception ex) {
throw new ConfigurationException("Can initialize new configuration", ex);
@@ -113,8 +112,7 @@ private void addXmlElement(Element parentElement, String tagValue,
*/
public void write(Writer writer) {
try {
- TransformerFactory factory = TransformerFactory.newInstance();
- factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
+ TransformerFactory factory = XMLUtils.newSecureTransformerFactory();
Transformer transformer = factory.newTransformer();
transformer.setOutputProperty(OutputKeys.ENCODING,
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
index a0ea97fc56e..565fc62a8fd 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/net/NodeSchemaLoader.java
@@ -28,13 +28,13 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.hdds.scm.net.NodeSchema.LayerType;
import org.apache.hadoop.hdds.server.YamlUtils;
+import org.apache.hadoop.util.XMLUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
@@ -174,8 +174,7 @@ private NodeSchemaLoadResult loadSchema(InputStream inputStream) throws
ParserConfigurationException, SAXException, IOException {
LOG.info("Loading network topology layer schema file");
// Read and parse the schema file.
- DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
- dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
+ DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
dbf.setIgnoringComments(true);
DocumentBuilder builder = dbf.newDocumentBuilder();
Document doc = builder.parse(inputStream);
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HttpServletUtils.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HttpServletUtils.java
index 682777e1f20..11b933f3a97 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HttpServletUtils.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HttpServletUtils.java
@@ -109,7 +109,7 @@ private static void writeXmlError(String errorMessage, Writer out) throws IOExce
root.setTextContent(errorMessage);
doc.appendChild(root);
- TransformerFactory transformerFactory = TransformerFactory.newInstance();
+ TransformerFactory transformerFactory = XMLUtils.newSecureTransformerFactory();
Transformer transformer = transformerFactory.newTransformer();
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
transformer.setOutputProperty(OutputKeys.STANDALONE, "no");
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/scm/net/TestNodeSchemaLoader.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/scm/net/TestNodeSchemaLoader.java
index e3645beb8b0..6bcc8c21947 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/scm/net/TestNodeSchemaLoader.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/scm/net/TestNodeSchemaLoader.java
@@ -61,7 +61,7 @@ public static Stream<Arguments> getSchemaFiles() {
arguments("no-topology.xml", "no or multiple <topology> element"),
arguments("multiple-topology.xml", "no or multiple <topology> element"),
arguments("invalid-version.xml", "Bad layoutversion value"),
- arguments("external-entity.xml", "accessExternalDTD")
+ arguments("external-entity.xml", "disallow-doctype-decl")
);
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/MessageUnmarshaller.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/MessageUnmarshaller.java
index c032c895130..61a5ac8c921 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/MessageUnmarshaller.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/MessageUnmarshaller.java
@@ -28,10 +28,10 @@
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyReader;
-import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.UnmarshallerHandler;
import javax.xml.parsers.SAXParserFactory;
+import org.apache.hadoop.util.XMLUtils;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
@@ -51,8 +51,7 @@ public MessageUnmarshaller(Class<T> cls) {
try {
context = JAXBContext.newInstance(cls);
- saxParserFactory = SAXParserFactory.newInstance();
- saxParserFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
+ saxParserFactory = XMLUtils.newSecureSAXParserFactory();
} catch (Exception ex) {
throw new AssertionError("Can not instantiate XML parser for " +
cls.getSimpleName(), ex);
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]