HDFS-16795. Use secure XML parsers (#4979)

Move construction of XML parsers in HDFS
modules to the locked-down parser factory
of HADOOP-18469.

Contributed by P J Fanning
Steve Loughran 2022-10-20 17:48:58 +01:00 committed by GitHub
parent 5b7cbe2075
commit 75b04010a2
6 changed files with 20 additions and 17 deletions
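
Every hunk below follows the same pattern: drop the per-call-site factory construction and take a pre-hardened factory from org.apache.hadoop.util.XMLUtils, added by HADOOP-18469. A minimal caller-side sketch, assuming only the XMLUtils method names that appear in the hunks and that the helper applies the same protections as the hand-written flags removed in the first hunk:

import java.io.File;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.util.XMLUtils;
import org.w3c.dom.Document;

public class SecureXmlParseSketch {
  // Parse an XML file with a factory that is already locked down,
  // so individual callers no longer set hardening features themselves.
  static Document parse(File xmlFile) throws Exception {
    DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
    DocumentBuilder builder = dbf.newDocumentBuilder();
    return builder.parse(xmlFile);
  }
}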

@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.util;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.util.XMLUtils;
import org.w3c.dom.Node;
import org.w3c.dom.Text;
import org.w3c.dom.Element;
@@ -87,13 +89,8 @@ public class ECPolicyLoader {
LOG.info("Loading EC policy file " + policyFile);
// Read and parse the EC policy file.
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
dbf.setIgnoringComments(true);
dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
dbf.setFeature("http://apache.org/xml/features/dom/create-entity-ref-nodes", false);
DocumentBuilder builder = dbf.newDocumentBuilder();
Document doc = builder.parse(policyFile);
Element root = doc.getDocumentElement();
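
The five setFeature calls removed above are exactly the protections each caller previously had to remember. XMLUtils.newSecureDocumentBuilderFactory() is presumably equivalent to something like the following sketch (an assumption based on those removed flags, not the actual XMLUtils source):

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

public class HardenedDomFactorySketch {
  static DocumentBuilderFactory newHardenedFactory()
      throws ParserConfigurationException {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    // Reject any document that carries a DOCTYPE declaration at all.
    dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    // Never load external DTDs or resolve external entities (XXE hardening).
    dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
    dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    // Expand entities in place rather than creating entity-reference nodes.
    dbf.setFeature("http://apache.org/xml/features/dom/create-entity-ref-nodes", false);
    return dbf;
  }
}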

@@ -60,8 +60,8 @@ public class XmlEditsVisitor implements OfflineEditsVisitor {
public XmlEditsVisitor(OutputStream out)
throws IOException {
this.out = out;
factory =(SAXTransformerFactory)SAXTransformerFactory.newInstance();
try {
factory = org.apache.hadoop.util.XMLUtils.newSecureSAXTransformerFactory();
TransformerHandler handler = factory.newTransformerHandler();
handler.getTransformer().setOutputProperty(OutputKeys.METHOD, "xml");
handler.getTransformer().setOutputProperty(OutputKeys.ENCODING, "UTF-8");
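
The hunk does not show what newSecureSAXTransformerFactory() configures; a plausible sketch is the standard JAXP lock-down of external access on a SAXTransformerFactory (an assumption, not the XMLUtils implementation):

import javax.xml.XMLConstants;
import javax.xml.transform.sax.SAXTransformerFactory;

public class HardenedSaxTransformerFactorySketch {
  static SAXTransformerFactory newHardenedFactory() {
    SAXTransformerFactory factory =
        (SAXTransformerFactory) SAXTransformerFactory.newInstance();
    // Empty protocol lists forbid the transformer from fetching
    // external DTDs or stylesheets while writing the edits XML.
    factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
    factory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
    return factory;
  }
}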

@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.util.Preconditions;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_NAME_MASK;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_NAME_OFFSET;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_SCOPE_OFFSET;
@@ -56,6 +56,7 @@ import org.apache.hadoop.thirdparty.protobuf.TextFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.permission.AclEntry;
@@ -147,6 +148,8 @@ class OfflineImageReconstructor {
InputStreamReader reader) throws XMLStreamException {
this.out = out;
XMLInputFactory factory = XMLInputFactory.newInstance();
factory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
factory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
this.events = factory.createXMLEventReader(reader);
this.sections = new HashMap<>();
this.sections.put(NameSectionProcessor.NAME, new NameSectionProcessor());
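
This hunk hardens the StAX XMLInputFactory directly through its standard properties rather than via an XMLUtils helper. The same two settings, isolated into a small sketch:

import java.io.Reader;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;

public class SecureStaxReaderSketch {
  static XMLEventReader newEventReader(Reader reader) throws XMLStreamException {
    XMLInputFactory factory = XMLInputFactory.newInstance();
    // Refuse DTDs outright and never resolve external entities, closing
    // the XXE / entity-expansion surface of the streaming parser.
    factory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
    factory.setProperty(XMLInputFactory.IS_SUPPORTING_EXTERNAL_ENTITIES, false);
    return factory.createXMLEventReader(reader);
  }
}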

@@ -103,6 +103,7 @@ import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.XMLUtils;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.After;
@@ -150,7 +151,6 @@ import org.xml.sax.InputSource;
import org.xml.sax.helpers.DefaultHandler;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
public class TestEncryptionZones {
static final Logger LOG = Logger.getLogger(TestEncryptionZones.class);
@@ -1731,7 +1731,7 @@ public class TestEncryptionZones {
PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), pw);
v.visit(new RandomAccessFile(originalFsimage, "r"));
final String xml = output.toString();
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
SAXParser parser = XMLUtils.newSecureSAXParserFactory().newSAXParser();
parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
}
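
The same well-formedness check in isolation, assuming XMLUtils.newSecureSAXParserFactory() returns an ordinary SAXParserFactory as the test implies:

import java.io.StringReader;
import javax.xml.parsers.SAXParser;
import org.apache.hadoop.util.XMLUtils;
import org.xml.sax.InputSource;
import org.xml.sax.helpers.DefaultHandler;

public class SecureSaxCheckSketch {
  // Parse the generated XML with a no-op handler purely to confirm it is
  // well formed; the hardened factory is expected to reject a crafted
  // DOCTYPE rather than resolve entities from it.
  static void check(String xml) throws Exception {
    SAXParser parser = XMLUtils.newSecureSAXParserFactory().newSAXParser();
    parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());
  }
}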

@@ -122,6 +122,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.XMLUtils;
import org.slf4j.event.Level;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -558,7 +560,7 @@ public class TestOfflineImageViewer {
try (RandomAccessFile r = new RandomAccessFile(originalFsimage, "r")) {
v.visit(r);
}
SAXParserFactory spf = SAXParserFactory.newInstance();
SAXParserFactory spf = XMLUtils.newSecureSAXParserFactory();
SAXParser parser = spf.newSAXParser();
final String xml = output.toString();
ECXMLHandler ecxmlHandler = new ECXMLHandler();
@@ -984,13 +986,13 @@ public class TestOfflineImageViewer {
private void deleteINodeFromXML(File inputFile, File outputFile,
List<Long> corruptibleIds) throws Exception {
DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
DocumentBuilderFactory docFactory = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
Document doc = docBuilder.parse(inputFile);
properINodeDelete(corruptibleIds, doc);
TransformerFactory transformerFactory = TransformerFactory.newInstance();
TransformerFactory transformerFactory = XMLUtils.newSecureTransformerFactory();
Transformer transformer = transformerFactory.newTransformer();
DOMSource source = new DOMSource(doc);
StreamResult result = new StreamResult(outputFile);
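
Both secure factories used in this hunk, combined into a compact parse-modify-serialize sketch (the DOM mutation itself is elided):

import java.io.File;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.transform.Transformer;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.hadoop.util.XMLUtils;
import org.w3c.dom.Document;

public class SecureXmlRoundTripSketch {
  static void rewrite(File input, File output) throws Exception {
    // Read with the hardened DOM factory ...
    DocumentBuilder db =
        XMLUtils.newSecureDocumentBuilderFactory().newDocumentBuilder();
    Document doc = db.parse(input);
    // ... mutate the Document here ...
    // ... then write it back out via the hardened transformer factory.
    Transformer transformer =
        XMLUtils.newSecureTransformerFactory().newTransformer();
    transformer.transform(new DOMSource(doc), new StreamResult(output));
  }
}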
@@ -1323,10 +1325,9 @@ public class TestOfflineImageViewer {
v.visit(new RandomAccessFile(originalFsimage, "r"));
final String xml = output.toString();
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
InputSource is = new InputSource(new StringReader(xml));
Document dom = db.parse(is);
NodeList ecSection = dom.getElementsByTagName(ERASURE_CODING_SECTION_NAME);
assertEquals(1, ecSection.getLength());

@@ -48,6 +48,8 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.XMLUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -220,7 +222,7 @@ public class TestOfflineImageViewerForAcl {
PrintStream o = new PrintStream(output);
PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), o);
v.visit(new RandomAccessFile(originalFsimage, "r"));
SAXParserFactory spf = SAXParserFactory.newInstance();
SAXParserFactory spf = XMLUtils.newSecureSAXParserFactory();
SAXParser parser = spf.newSAXParser();
final String xml = output.toString();
parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());