From a43a9cfe1a14edebf9c065829bbdad6f204f0c26 Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Thu, 10 May 2012 23:15:19 +0000
Subject: [PATCH] HDFS-3134. svn merge -c 1336943 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1336944 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |  3 +
 .../token/block/BlockTokenIdentifier.java          |  3 +-
 .../hdfs/server/namenode/FSEditLogOp.java          | 31 +++++++-
 .../hdfs/server/namenode/TestEditLog.java          | 72 +++++++++++++++++++
 4 files changed, 106 insertions(+), 3 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cc7b7a0ca5f..9a6989267d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -305,6 +305,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-3369. Rename {get|set|add}INode(..) methods in BlockManager and
     BlocksMap to {get|set|add}BlockCollection(..). (John George via szetszwo)
 
+    HDFS-3134. Harden edit log loader against malformed or malicious input.
+    (Colin Patrick McCabe via eli)
+
   OPTIMIZATIONS
 
     HDFS-2477. Optimize computing the diff between a block report and the
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
index c1fd3f9f826..62f2d762379 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenIdentifier.java
@@ -148,7 +148,8 @@ public class BlockTokenIdentifier extends TokenIdentifier {
     userId = WritableUtils.readString(in);
     blockPoolId = WritableUtils.readString(in);
     blockId = WritableUtils.readVLong(in);
-    int length = WritableUtils.readVInt(in);
+    int length = WritableUtils.readVIntInRange(in, 0,
+        AccessMode.class.getEnumConstants().length);
     for (int i = 0; i < length; i++) {
       modes.add(WritableUtils.readEnum(in, AccessMode.class));
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index 56a610f101e..9f7742cc674 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -203,6 +203,10 @@ public abstract class FSEditLogOp {
     }
 
     <T extends AddCloseOp> T setBlocks(Block[] blocks) {
+      if (blocks.length > MAX_BLOCKS) {
+        throw new RuntimeException("Can't have more than " + MAX_BLOCKS +
+            " blocks in an AddCloseOp.");
+      }
       this.blocks = blocks;
       return (T)this;
     }
@@ -296,10 +300,18 @@ public abstract class FSEditLogOp {
       }
     }
 
+    public static final int MAX_BLOCKS = 1024 * 1024 * 64;
+
     private static Block[] readBlocks(
         DataInputStream in,
         int logVersion) throws IOException {
       int numBlocks = in.readInt();
+      if (numBlocks < 0) {
+        throw new IOException("invalid negative number of blocks");
+      } else if (numBlocks > MAX_BLOCKS) {
+        throw new IOException("invalid number of blocks: " + numBlocks +
+            ". The maximum number of blocks per file is " + MAX_BLOCKS);
+      }
       Block[] blocks = new Block[numBlocks];
       for (int i = 0; i < numBlocks; i++) {
         Block blk = new Block();
@@ -579,6 +591,7 @@ public abstract class FSEditLogOp {
     String trg;
     String[] srcs;
     long timestamp;
+    public static final int MAX_CONCAT_SRC = 1024 * 1024;
 
     private ConcatDeleteOp() {
       super(OP_CONCAT_DELETE);
@@ -594,7 +607,12 @@ public abstract class FSEditLogOp {
     }
 
     ConcatDeleteOp setSources(String[] srcs) {
+      if (srcs.length > MAX_CONCAT_SRC) {
+        throw new RuntimeException("ConcatDeleteOp can only have " +
+            MAX_CONCAT_SRC + " sources at most.");
+      }
       this.srcs = srcs;
+
       return this;
     }
 
@@ -624,8 +642,8 @@
       if (!LayoutVersion.supports(Feature.EDITLOG_OP_OPTIMIZATION, logVersion)) {
         this.length = in.readInt();
         if (length < 3) { // trg, srcs.., timestamp
-          throw new IOException("Incorrect data format. "
-              + "Concat delete operation.");
+          throw new IOException("Incorrect data format "
+              + "for ConcatDeleteOp.");
         }
       }
       this.trg = FSImageSerialization.readString(in);
@@ -635,6 +653,15 @@
       } else {
         srcSize = this.length - 1 - 1; // trg and timestamp
       }
+      if (srcSize < 0) {
+        throw new IOException("Incorrect data format. "
+            + "ConcatDeleteOp cannot have a negative number of data "
+            + "sources.");
+      } else if (srcSize > MAX_CONCAT_SRC) {
+        throw new IOException("Incorrect data format. "
+            + "ConcatDeleteOp can have at most " + MAX_CONCAT_SRC
+            + " sources, but we tried to have " + srcSize + " sources.");
+      }
       this.srcs = new String [srcSize];
       for(int i=0; i<srcSize;i++) {
         srcs[i]= FSImageSerialization.readString(in);
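
Every hunk above applies the same defensive pattern: a count read from an
untrusted stream is range-checked before it is used to size an allocation or
drive a read loop (readVIntInRange() in BlockTokenIdentifier, the MAX_BLOCKS
check in readBlocks(), the MAX_CONCAT_SRC checks in ConcatDeleteOp). The
sketch below shows that pattern in isolation; it is not code from this patch,
and SafeArrayReader, MAX_ENTRIES, and readStringArray() are hypothetical
names.

    import java.io.DataInputStream;
    import java.io.IOException;

    class SafeArrayReader {
      // Bound chosen the way the patch chooses MAX_BLOCKS and MAX_CONCAT_SRC:
      // far above any legitimate value, far below what would let a forged
      // length exhaust the heap.
      static final int MAX_ENTRIES = 1024 * 1024;

      // Reads a length-prefixed string array, rejecting the length before
      // any allocation or read loop depends on it.
      static String[] readStringArray(DataInputStream in) throws IOException {
        int n = in.readInt();
        if (n < 0) {
          throw new IOException("invalid negative array length: " + n);
        } else if (n > MAX_ENTRIES) {
          throw new IOException("array length " + n +
              " exceeds the maximum of " + MAX_ENTRIES);
        }
        String[] result = new String[n];  // safe: n is now bounded
        for (int i = 0; i < n; i++) {
          result[i] = in.readUTF();
        }
        return result;
      }
    }

Throwing IOException instead of letting an oversized allocation surface as an
OutOfMemoryError lets the loader treat a corrupt or hostile edit log segment
as bad input rather than a fatal JVM condition.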