diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 329a91e24f8..e3f3cec1828 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -360,6 +360,9 @@ Release 2.3.0 - UNRELEASED HADOOP-9897. Add method to get path start position without drive specifier in o.a.h.fs.Path. (Binglin Chang via cnauroth) + HADOOP-9078. enhance unit-test coverage of class + org.apache.hadoop.fs.FileContext (Ivan A. Veselovsky via jeagles) + OPTIMIZATIONS HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn) @@ -431,6 +434,9 @@ Release 2.2.1 - UNRELEASED HADOOP-10040. hadoop.cmd in UNIX format and would not run by default on Windows. (cnauroth) + HADOOP-10055. FileSystemShell.apt.vm doc has typo "numRepicas". + (Akira Ajisaka via cnauroth) + Release 2.2.0 - 2013-10-13 INCOMPATIBLE CHANGES diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java index 570e442c282..440d480cc90 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SnapshotCommands.java @@ -68,7 +68,7 @@ class SnapshotCommands extends FsCommand { throw new IllegalArgumentException(" is missing."); } if (args.size() > 2) { - throw new IllegalArgumentException("Too many arguements."); + throw new IllegalArgumentException("Too many arguments."); } if (args.size() == 2) { snapshotName = args.removeLast(); @@ -110,7 +110,7 @@ class SnapshotCommands extends FsCommand { @Override protected void processOptions(LinkedList args) throws IOException { if (args.size() != 2) { - throw new IOException("args number not 2: " + args.size()); + throw new IllegalArgumentException("Incorrect number of arguments."); } 
snapshotName = args.removeLast(); } @@ -150,7 +150,7 @@ class SnapshotCommands extends FsCommand { @Override protected void processOptions(LinkedList args) throws IOException { if (args.size() != 3) { - throw new IOException("args number not 3: " + args.size()); + throw new IllegalArgumentException("Incorrect number of arguments."); } newName = args.removeLast(); oldName = args.removeLast(); diff --git a/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm b/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm index 78cd880a67a..3e6fd21070a 100644 --- a/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm +++ b/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm @@ -381,7 +381,7 @@ rmr setrep - Usage: << >>> + Usage: << >>> Changes the replication factor of a file. If is a directory then the command recursively changes the replication factor of all files under diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java index 354f7aabfd6..e872176cd97 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java @@ -22,6 +22,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.EnumSet; +import java.util.NoSuchElementException; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.HadoopIllegalArgumentException; @@ -30,6 +31,7 @@ import org.apache.hadoop.fs.Options.Rename; import org.apache.hadoop.fs.permission.FsPermission; import org.junit.After; import org.junit.Assert; +import static org.junit.Assert.*; import org.junit.Before; import org.junit.Test; @@ -92,7 +94,7 @@ 
public abstract class FileContextMainOperationsBaseTest { } }; - private static byte[] data = getFileData(numBlocks, + private static final byte[] data = getFileData(numBlocks, getDefaultBlockSize()); @Before @@ -107,7 +109,8 @@ public abstract class FileContextMainOperationsBaseTest { @After public void tearDown() throws Exception { - fc.delete(new Path(fileContextTestHelper.getAbsoluteTestRootPath(fc), new Path("test")), true); + boolean del = fc.delete(new Path(fileContextTestHelper.getAbsoluteTestRootPath(fc), new Path("test")), true); + assertTrue(del); fc.delete(localFsRootPath, true); } @@ -194,6 +197,14 @@ public abstract class FileContextMainOperationsBaseTest { fc.setWorkingDirectory(absoluteDir); Assert.assertEquals(absoluteDir, fc.getWorkingDirectory()); + Path aRegularFile = new Path("aRegularFile"); + createFile(aRegularFile); + try { + fc.setWorkingDirectory(aRegularFile); + fail("An IOException expected."); + } catch (IOException ioe) { + // okay + } } @Test @@ -1195,6 +1206,136 @@ public abstract class FileContextMainOperationsBaseTest { return true; } return false; + } + + @Test + public void testOpen2() throws IOException { + final Path rootPath = getTestRootPath(fc, "test"); + //final Path rootPath = getAbsoluteTestRootPath(fc); + final Path path = new Path(rootPath, "zoo"); + createFile(path); + final long length = fc.getFileStatus(path).getLen(); + FSDataInputStream fsdis = fc.open(path, 2048); + try { + byte[] bb = new byte[(int)length]; + fsdis.readFully(bb); + assertArrayEquals(data, bb); + } finally { + fsdis.close(); + } + } + + @Test + public void testSetVerifyChecksum() throws IOException { + final Path rootPath = getTestRootPath(fc, "test"); + final Path path = new Path(rootPath, "zoo"); + + FSDataOutputStream out = fc.create(path, EnumSet.of(CREATE), + Options.CreateOpts.createParent()); + try { + // instruct FS to verify checksum through the FileContext: + fc.setVerifyChecksum(true, path); + out.write(data, 0, data.length); + } 
finally { + out.close(); + } + + // NB: underlying FS may be different (this is an abstract test), + // so we cannot assert .zoo.crc existence. + // Instead, we check that the file is read correctly: + FileStatus fileStatus = fc.getFileStatus(path); + final long len = fileStatus.getLen(); + assertTrue(len == data.length); + byte[] bb = new byte[(int)len]; + FSDataInputStream fsdis = fc.open(path); + try { + fsdis.read(bb); + } finally { + fsdis.close(); + } + assertArrayEquals(data, bb); + } + + @Test + public void testListCorruptFileBlocks() throws IOException { + final Path rootPath = getTestRootPath(fc, "test"); + final Path path = new Path(rootPath, "zoo"); + createFile(path); + try { + final RemoteIterator remoteIterator = fc + .listCorruptFileBlocks(path); + if (listCorruptedBlocksSupported()) { + assertTrue(remoteIterator != null); + Path p; + while (remoteIterator.hasNext()) { + p = remoteIterator.next(); + System.out.println("corrupted block: " + p); + } + try { + remoteIterator.next(); + fail(); + } catch (NoSuchElementException nsee) { + // okay + } + } else { + fail(); + } + } catch (UnsupportedOperationException uoe) { + if (listCorruptedBlocksSupported()) { + fail(uoe.toString()); + } else { + // okay + } + } + } + + protected abstract boolean listCorruptedBlocksSupported(); + + @Test + public void testDeleteOnExitUnexisting() throws IOException { + final Path rootPath = getTestRootPath(fc, "test"); + final Path path = new Path(rootPath, "zoo"); + boolean registered = fc.deleteOnExit(path); + // because "zoo" does not exist: + assertTrue(!registered); + } + + @Test + public void testFileContextStatistics() throws IOException { + FileContext.clearStatistics(); + + final Path rootPath = getTestRootPath(fc, "test"); + final Path path = new Path(rootPath, "zoo"); + createFile(path); + byte[] bb = new byte[data.length]; + FSDataInputStream fsdis = fc.open(path); + try { + fsdis.read(bb); + } finally { + fsdis.close(); + } + assertArrayEquals(data, bb); + + 
FileContext.printStatistics(); + } + + @Test + /* + * Test method + * org.apache.hadoop.fs.FileContext.getFileContext(AbstractFileSystem) + */ + public void testGetFileContext1() throws IOException { + final Path rootPath = getTestRootPath(fc, "test"); + AbstractFileSystem asf = fc.getDefaultFileSystem(); + // create FileContext using the protected #getFileContext(1) method: + FileContext fc2 = FileContext.getFileContext(asf); + // Now just check that this context can do something reasonable: + final Path path = new Path(rootPath, "zoo"); + FSDataOutputStream out = fc2.create(path, EnumSet.of(CREATE), + Options.CreateOpts.createParent()); + out.close(); + Path pathResolved = fc2.resolvePath(path); + assertEquals(pathResolved.toUri().getPath(), path.toUri().getPath()); } private Path getTestRootPath(FileContext fc, String pathString) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java index 68bfacebf43..d17305bc904 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java @@ -35,6 +35,7 @@ import org.junit.Test; import static org.apache.hadoop.fs.FileContextTestHelper.*; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; /** *

@@ -174,6 +175,13 @@ public abstract class FileContextPermissionBase { System.out.println("Not testing changing the group since user " + "belongs to only one group."); } + + try { + fc.setOwner(f, null, null); + fail("Exception expected."); + } catch (IllegalArgumentException iae) { + // okay + } } finally {cleanupFile(fc, f);} } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java index e3e34c5082f..493131c06a9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextMainOperations.java @@ -49,6 +49,11 @@ public class TestLocalFSFileContextMainOperations extends FileContextMainOperati FileContext fc1 = FileContext.getLocalFSFileContext(); Assert.assertTrue(fc1 != fc); } + + @Override + protected boolean listCorruptedBlocksSupported() { + return false; + } @Test public void testDefaultFilePermission() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java index 61e0322491a..64520e1ba26 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java @@ -18,13 +18,9 @@ package org.apache.hadoop.fs.viewfs; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileContextMainOperationsBaseTest; -import org.apache.hadoop.fs.FileContextTestHelper; -import org.apache.hadoop.fs.FsConstants; 
import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.viewfs.ConfigUtil; import org.junit.After; import org.junit.Before; @@ -49,4 +45,9 @@ public class TestFcMainOperationsLocalFs extends super.tearDown(); ViewFsTestSetup.tearDownForViewFsLocalFs(fileContextTestHelper); } + + @Override + protected boolean listCorruptedBlocksSupported() { + return false; + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index f5f041e53b0..c7c5e044cb1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -255,7 +255,12 @@ Release 2.3.0 - UNRELEASED HDFS-5342. Provide more information in the FSNamesystem JMX interfaces. (Haohui Mai via jing9) - + + HDFS-5334. Implement dfshealth.jsp in HTML pages. (Haohui Mai via jing9) + + HDFS-5379. Update links to datanode information in dfshealth.html. (Haohui + Mai via jing9) + IMPROVEMENTS HDFS-5267. Remove volatile from LightWeightHashSet. (Junping Du via llu) @@ -352,6 +357,13 @@ Release 2.3.0 - UNRELEASED HDFS-5283. Under construction blocks only inside snapshots should not be counted in safemode threshhold. (Vinay via szetszwo) + HDFS-4376. Fix race conditions in Balancer. (Junping Du via szetszwo) + + HDFS-5375. hdfs.cmd does not expose several snapshot commands. (cnauroth) + + HDFS-5336. DataNode should not output 'StartupProgress' metrics. + (Akira Ajisaka via cnauroth) + Release 2.2.1 - UNRELEASED INCOMPATIBLE CHANGES @@ -360,6 +372,9 @@ Release 2.2.1 - UNRELEASED IMPROVEMENTS + HDFS-5360. Improvement of usage message of renameSnapshot and + deleteSnapshot. (Shinichi Yamashita via wang) + OPTIMIZATIONS BUG FIXES @@ -3674,6 +3689,9 @@ Release 0.23.10 - UNRELEASED HDFS-5010. Reduce the frequency of getCurrentUser() calls from namenode (kihwal) + HDFS-5346. 
Avoid unnecessary call to getNumLiveDataNodes() for each block + during IBR processing (Ravi Prakash via kihwal) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml index 2160d83bac3..2dd37ce965e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml @@ -542,6 +542,9 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> src/main/docs/releasenotes.html src/contrib/** src/site/resources/images/* + src/main/webapps/static/dust-full-2.0.0.min.js + src/main/webapps/static/dust-helpers-1.1.1.min.js + src/main/webapps/hdfs/dfshealth.dust.html diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs.cmd b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs.cmd index 70af80c7d54..503c2680ead 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs.cmd +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs.cmd @@ -123,6 +123,14 @@ goto :eof set CLASS=org.apache.hadoop.hdfs.tools.GetGroups goto :eof +:snapshotDiff + set CLASS=org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff + goto :eof + +:lsSnapshottableDir + set CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir + goto :eof + @rem This changes %1, %2 etc. Hence those cannot be used after calling this. :make_command_arguments if "%1" == "--config" ( @@ -164,7 +172,10 @@ goto :eof @echo fetchdt fetch a delegation token from the NameNode @echo getconf get config values from configuration @echo groups get the groups which users belong to - @echo Use -help to see options + @echo snapshotDiff diff two snapshots of a directory or diff the + @echo current directory contents with a snapshot + @echo lsSnapshottableDir list all snapshottable dirs owned by the current user + @echo Use -help to see options @echo. @echo Most commands print help when invoked w/o parameters. 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java index d512efe11cd..23d64b4c4c7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Balancer.java @@ -506,7 +506,7 @@ public class Balancer { final DatanodeInfo datanode; final double utilization; final long maxSize2Move; - protected long scheduledSize = 0L; + private long scheduledSize = 0L; // blocks being moved but not confirmed yet private List pendingBlocks = new ArrayList(MAX_NUM_CONCURRENT_MOVES); @@ -555,20 +555,35 @@ public class Balancer { } /** Decide if still need to move more bytes */ - protected boolean hasSpaceForScheduling() { + protected synchronized boolean hasSpaceForScheduling() { return scheduledSize0 && + while(!isTimeUp && getScheduledSize()>0 && (!srcBlockList.isEmpty() || blocksToReceive>0)) { PendingBlockMove pendingBlock = chooseNextBlockToMove(); if (pendingBlock != null) { @@ -779,7 +795,7 @@ public class Balancer { // in case no blocks can be moved for source node's task, // jump out of while-loop after 5 iterations. 
if (noPendingBlockIteration >= MAX_NO_PENDING_BLOCK_ITERATIONS) { - scheduledSize = 0; + setScheduledSize(0); } } @@ -981,7 +997,7 @@ public class Balancer { long bytesToMove = 0L; for (Source src : sources) { - bytesToMove += src.scheduledSize; + bytesToMove += src.getScheduledSize(); } return bytesToMove; } @@ -1082,7 +1098,7 @@ public class Balancer { bytesMoved += bytes; } - private long get() { + private synchronized long get() { return bytesMoved; } }; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 5217f31006e..756397ec7ac 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java @@ -4613,7 +4613,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats, */ private boolean needEnter() { return (threshold != 0 && blockSafe < blockThreshold) || - (getNumLiveDataNodes() < datanodeThreshold) || + (datanodeThreshold != 0 && getNumLiveDataNodes() < datanodeThreshold) || (!nameNodeHasResourcesAvailable()); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java index 4e1b9cd77b3..ca31d5f9089 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java @@ -270,10 +270,6 @@ public class NameNode implements NameNodeStatusMXBean { static NameNodeMetrics metrics; private static final StartupProgress startupProgress = new StartupProgress(); - static { - StartupProgressMetrics.register(startupProgress); - } - /** 
Return the {@link FSNamesystem} object. * @return {@link FSNamesystem} object. */ @@ -485,6 +481,7 @@ public class NameNode implements NameNodeStatusMXBean { loginAsNameNodeUser(conf); NameNode.initMetrics(conf, this.getRole()); + StartupProgressMetrics.register(startupProgress); if (NamenodeRole.NAMENODE == role) { startHttpServer(conf); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js new file mode 100644 index 00000000000..b9febf24fae --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js @@ -0,0 +1,116 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +(function ($, dust, exports) { + "use strict"; + + var filters = { + 'fmt_bytes': function (v) { + var UNITS = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'ZB']; + var prev = 0, i = 0; + while (Math.floor(v) > 0 && i < UNITS.length) { + prev = v; + v /= 1024; + i += 1; + } + + if (i > 0 && i < UNITS.length) { + v = prev; + i -= 1; + } + return Math.round(v * 100) / 100 + ' ' + UNITS[i]; + }, + + 'fmt_percentage': function (v) { + return Math.round(v * 100) / 100 + '%'; + }, + + 'fmt_time': function (v) { + var s = Math.floor(v / 1000), h = Math.floor(s / 3600); + s -= h * 3600; + var m = Math.floor(s / 60); + s -= m * 60; + + var res = s + " sec"; + if (m !== 0) { + res = m + " mins, " + res; + } + + if (h !== 0) { + res = h + " hrs, " + res; + } + + return res; + } + }; + $.extend(dust.filters, filters); + + /** + * Load templates from external sources in sequential orders, and + * compile them. The loading order is important to resolve dependency. + * + * The code compile the templates on the client sides, which should be + * precompiled once we introduce the infrastructure in the building + * system. + * + * templates is an array of tuples in the format of {url, name}. + */ + function load_templates(dust, templates, success_cb, error_cb) { + if (templates.length === 0) { + success_cb(); + return; + } + + var t = templates.shift(); + $.get(t.url, function (tmpl) { + var c = dust.compile(tmpl, t.name); + dust.loadSource(c); + load_templates(dust, templates, success_cb, error_cb); + }).error(function (jqxhr, text, err) { + error_cb(t.url, jqxhr, text, err); + }); + } + + /** + * Load a sequence of JSON. + * + * beans is an array of tuples in the format of {url, name}. 
+ */ + function load_json(beans, success_cb, error_cb) { + var data = {}, error = false, to_be_completed = beans.length; + + $.each(beans, function(idx, b) { + if (error) { + return false; + } + $.get(b.url, function (resp) { + data[b.name] = resp; + to_be_completed -= 1; + if (to_be_completed === 0) { + success_cb(data); + } + }).error(function (jqxhr, text, err) { + error = true; + error_cb(b.url, jqxhr, text, err); + }); + }); + } + + exports.load_templates = load_templates; + exports.load_json = load_json; + +}($, dust, window)); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.dust.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.dust.html new file mode 100644 index 00000000000..9924825ea55 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.dust.html @@ -0,0 +1,265 @@ +

+ +{#nn} +{@if cond="{DistinctVersionCount} > 1"} +
+ + + There are {DistinctVersionCount} versions of datanodes currently live: + {#DistinctVersions} + {key} ({value}) {@sep},{/sep} + {/DistinctVersions} +
+{/if} + +{@if cond="{NumberOfMissingBlocks} > 0"} +
+ + +

There are {NumberOfMissingBlocks} missing blocks. The following files may be corrupted:

+
+
+ {#CorruptFiles} + {.}
+ {/CorruptFiles} +
+

Please check the logs or run fsck in order to identify the missing blocks. See the Hadoop FAQ for common causes and potential solutions.

+
+{/if} +{/nn} + +
+
Overview
+
+ {#nn} + + + + + + +
Started:{NNStarted}
Version:{Version}
Compiled:{CompileInfo}
Cluster ID:{ClusterId}
Block Pool ID:{BlockPoolId}
+ {/nn} +
+
+ +Browse the filesystem NameNode Logs + +
+ +
+
Cluster Summary
+
+ +

+ Security is {#nnstat}{#SecurityModeEnabled}on{:else}off{/SecurityModeEnabled}{/nnstat}.

+

{#nn}{#Safemode}{.}{:else}Safemode is off.{/Safemode}{/nn}

+ +

+ {#fs} + {TotalLoad} files and directories, {BlocksTotal} blocks = {FilesTotal} total filesystem object(s). + {#helper_fs_max_objects/} + {/fs} +

+ {#mem.HeapMemoryUsage} +

Heap Memory used {used|fmt_bytes} of {committed|fmt_bytes} Heap Memory. Max Heap Memory is {max|fmt_bytes}.

+ {/mem.HeapMemoryUsage} + + {#mem.NonHeapMemoryUsage} +

Non Heap Memory used {used|fmt_bytes} of {committed|fmt_bytes} Committed Non Heap Memory. Max Non Heap Memory is {max|fmt_bytes}.

+ {/mem.NonHeapMemoryUsage} + + {#nn} + + + + + + + + + + + + {/nn} + + {#fs} + + + + + {/fs} +
Configured Capacity:{Total|fmt_bytes}
DFS Used:{Used|fmt_bytes}
Non DFS Used:{NonDfsUsedSpace|fmt_bytes}
DFS Remaining:{Free|fmt_bytes}
DFS Used%:{PercentUsed|fmt_percentage}
DFS Remaining%:{PercentRemaining|fmt_percentage}
Block Pool Used:{BlockPoolUsedSpace|fmt_bytes}
Block Pool Used%:{PercentBlockPoolUsed|fmt_percentage}
DataNodes usages% (Min/Median/Max/stdDev): {#NodeUsage.nodeUsage}{min} / {median} / {max} / {stdDev}{/NodeUsage.nodeUsage}
Live Nodes{NumLiveDataNodes} (Decommissioned: {NumDecomLiveDataNodes})
Dead Nodes{NumDeadDataNodes} (Decommissioned: {NumDecomDeadDataNodes})
Decommissioning Nodes{NumDecommissioningDataNodes}
Number of Under-Replicated Blocks{UnderReplicatedBlocks}
+
+
+ +
+
+
NameNode Journal Status
+
+

Current transaction ID: {nn.JournalTransactionInfo.LastAppliedOrWrittenTxId}

+ + + + + + {#nn.NameJournalStatus} + + {/nn.NameJournalStatus} + +
Journal ManagerState
{manager}{stream}
+
+
+ +
+
+
NameNode Storage
+
+ + + {#nn.NameDirStatuses} + {#active}{#helper_dir_status type="Active"/}{/active} + {#failed}{#helper_dir_status type="Failed"/}{/failed} + {/nn.NameDirStatuses} +
Storage DirectoryTypeState
+
+
+
+ +
+
Snapshot Summary
+
+ {#fs.SnapshotStats} + + + + + + + + + + +
Snapshottable directoriesSnapshotted directories
{SnapshottableDirectories}{Snapshots}
+ {/fs.SnapshotStats} +
+
+
+ +{#startup} +
+
Startup Progress
+
+

Elapsed Time: {elapsedTime|fmt_time}, Percent Complete: {percentComplete|fmt_percentage}

+ + + + + + + + + + {#phases} + + + + + + {#steps root_file=file} + + + + + + {/steps} + {/phases} +
PhaseCompletionElapsed Time
{desc} {file} {size|fmt_bytes}{percentComplete|fmt_percentage}{elapsedTime|fmt_time}
{stepDesc} {stepFile} {stepSize|fmt_bytes} ({count}/{total}){percentComplete|fmt_percentage}
+
+
+{/startup} + +
+
+
Datanode Information
+
+
+
Nodes in operation
+
+ + + + + + + + + + + + + + + + {#nn.LiveNodes} + + + + + + + + + + + + + {/nn.LiveNodes} + {#nn.DeadNodes} + + + + + + + + + + + + + {/nn.DeadNodes} +
NodeLast contactAdmin StateCapacityUsedNon DFS UsedRemainingBlocksBlock pool usedFailed Volumes
{name} ({xferaddr}){lastContact}{adminState}{capacity|fmt_bytes}{used|fmt_bytes}{nonDfsUsedSpace|fmt_bytes}{remaining|fmt_bytes}{numBlocks}{blockPoolUsed|fmt_bytes} ({blockPoolUsedPercent|fmt_percentage}){volfails}
{name} ({xferaddr}){lastContact}Dead{?decomissioned}, Decommissioned{/decomissioned}-------
+
+
+
+
Nodes being decommissioned
+
+ + + + + + + + + + + {#nn.DecomNodes} + + + + + + + + {/nn.DecomNodes} +
NodeLast contactUnder replicated blocksBlocks with no live replicasUnder Replicated Blocks
In files under construction
{name} ({xferaddr}){lastContact}{underReplicatedBlocks}{decommissionOnlyReplicas}{underReplicateInOpenFiles}
+
+
+
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html new file mode 100644 index 00000000000..6a7b57dcc47 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html @@ -0,0 +1,43 @@ + + + + + + +Namenode information + + +
+ +
+
+ +
+

Hadoop, 2013.

+ + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.js new file mode 100644 index 00000000000..852b8618449 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.js @@ -0,0 +1,156 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +(function () { + "use strict"; + + var data = {}; + function generate_browse_dn_link(info_http_addr, info_https_addr) { + var is_https = window.location.protocol === 'https:'; + var authority = is_https ? info_https_addr : info_http_addr; + + var nn_info_port = window.location.port; + if (nn_info_port === "") { + nn_info_port = is_https ? 
443 : 80; + } + + var l = '//' + authority + '/browseDirectory.jsp?dir=%2F&namenodeInfoPort=' + + nn_info_port + '&nnaddr=' + data.nnstat.HostAndPort; + return l; + } + + function render() { + var helpers = { + 'helper_fs_max_objects': function (chunk, ctx, bodies, params) { + var o = ctx.current(); + if (o.MaxObjects > 0) { + chunk.write('(' + Math.round((o.FilesTotal + o.BlockTotal) / o.MaxObjects * 100) * 100 + ')%'); + } + }, + + 'helper_dir_status': function (chunk, ctx, bodies, params) { + var j = ctx.current(); + for (var i in j) { + chunk.write('' + i + '' + j[i] + '' + params.type + ''); + } + } + }; + + var base = dust.makeBase(helpers); + + var TEMPLATES = [ { 'name': 'dfshealth', 'url': 'dfshealth.dust.html' } ]; + + load_templates(dust, TEMPLATES, function() { + dust.render('dfshealth', base.push(data), function(err, out) { + + $('#panel').append(out); + + $('#browse-dir-first').click(function () { + var len = data.nn.LiveNodes.length; + if (len < 1) { + show_err_msg('Cannot browse the DFS since there are no live nodes available.'); + return false; + } + + var dn = data.nn.LiveNodes[Math.floor(Math.random() * len)]; + window.location.href = generate_browse_dn_link(dn.infoAddr, dn.infoSecureAddr); + }); + + $('.browse-dir-links').click(function () { + var http_addr = $(this).attr('info-http-addr'), https_addr = $(this).attr('info-https-addr'); + window.location.href = generate_browse_dn_link(http_addr, https_addr); + }); + }); + }, function () { + show_err_msg('Failed to load the page.'); + }); + } + + var BEANS = [ + {"name": "nn", "url": "/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo"}, + {"name": "nnstat", "url": "/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"}, + {"name": "fs", "url": "/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState"}, + {"name": "mem", "url": "/jmx?qry=java.lang:type=Memory"}, + {"name": "startup", "url": "/startupProgress"} + ]; + + // Workarounds for the fact that JMXJsonServlet returns non-standard JSON 
strings + function data_workaround(d) { + function node_map_to_array(nodes) { + var res = []; + for (var n in nodes) { + var p = nodes[n]; + p.name = n; + res.push(p); + } + return res; + } + + function startup_progress_workaround(r) { + function rename_property(o, s, d) { + if (o[s] !== undefined) { + o[d] = o[s]; + delete o[s]; + } + } + r.percentComplete *= 100; + $.each(r.phases, function (idx, p) { + p.percentComplete *= 100; + $.each(p.steps, function (idx2, s) { + s.percentComplete *= 100; + // dust.js is confused by these optional keys in nested + // structure, rename them + rename_property(s, "desc", "stepDesc"); + rename_property(s, "file", "stepFile"); + rename_property(s, "size", "stepSize"); + }); + }); + return r; + } + + d.nn.JournalTransactionInfo = JSON.parse(d.nn.JournalTransactionInfo); + d.nn.NameJournalStatus = JSON.parse(d.nn.NameJournalStatus); + d.nn.NameDirStatuses = JSON.parse(d.nn.NameDirStatuses); + d.nn.NodeUsage = JSON.parse(d.nn.NodeUsage); + d.nn.LiveNodes = node_map_to_array(JSON.parse(d.nn.LiveNodes)); + d.nn.DeadNodes = node_map_to_array(JSON.parse(d.nn.DeadNodes)); + d.nn.DecomNodes = node_map_to_array(JSON.parse(d.nn.DecomNodes)); + d.nn.CorruptFiles = JSON.parse(d.nn.CorruptFiles); + + d.fs.SnapshotStats = JSON.parse(d.fs.SnapshotStats); + d.startup = startup_progress_workaround(d.startup); + return d; + } + + function show_err_msg(msg) { + $('#alert-panel-body').html(msg); + $('#alert-panel').show(); + } + + load_json( + BEANS, + function(d) { + for (var k in d) { + data[k] = k === "startup" ? d[k] : d[k].beans[0]; + } + data = data_workaround(data); + render(); + }, + function (url, jqxhr, text, err) { + show_err_msg('

Failed to retrieve data from ' + url + ', cause: ' + err + '

'); + }); +})(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js new file mode 100644 index 00000000000..6025144869c --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-full-2.0.0.min.js @@ -0,0 +1 @@ +function getGlobal(){return function(){return this.dust}.call(null)}var dust={};(function(dust){function Context(e,t,n){this.stack=e,this.global=t,this.blocks=n}function Stack(e,t,n,r){this.tail=t,this.isObject=!dust.isArray(e)&&e&&typeof e=="object",this.head=e,this.index=n,this.of=r}function Stub(e){this.head=new Chunk(this),this.callback=e,this.out=""}function Stream(){this.head=new Chunk(this)}function Chunk(e,t,n){this.root=e,this.next=t,this.data=[],this.flushable=!1,this.taps=n}function Tap(e,t){this.head=e,this.tail=t}dust.helpers={},dust.cache={},dust.register=function(e,t){if(!e)return;dust.cache[e]=t},dust.render=function(e,t,n){var r=(new Stub(n)).head;dust.load(e,r,Context.wrap(t,e)).end()},dust.stream=function(e,t){var n=new Stream;return dust.nextTick(function(){dust.load(e,n.head,Context.wrap(t,e)).end()}),n},dust.renderSource=function(e,t,n){return dust.compileFn(e)(t,n)},dust.compileFn=function(e,t){var n=dust.loadSource(dust.compile(e,t));return function(e,r){var i=r?new Stub(r):new Stream;return dust.nextTick(function(){n(i.head,Context.wrap(e,t)).end()}),i}},dust.load=function(e,t,n){var r=dust.cache[e];return r?r(t,n):dust.onLoad?t.map(function(t){dust.onLoad(e,function(r,i){if(r)return t.setError(r);dust.cache[e]||dust.loadSource(dust.compile(i,e)),dust.cache[e](t,n).end()})}):t.setError(new Error("Template Not Found: "+e))},dust.loadSource=function(source,path){return eval(source)},Array.isArray?dust.isArray=Array.isArray:dust.isArray=function(e){return Object.prototype.toString.call(e)=="[object Array]"},dust.nextTick=function(){return typeof 
process!="undefined"?process.nextTick:function(e){setTimeout(e,0)}}(),dust.isEmpty=function(e){return dust.isArray(e)&&!e.length?!0:e===0?!1:!e},dust.filter=function(e,t,n){if(n)for(var r=0,i=n.length;r1)return undefined;s?(n=s.head,s=s.tail,o=0):e||(n=this.global,e=!0,o=0)}}return typeof n=="function"?function(){return n.apply(r,arguments)}:n},Context.prototype.push=function(e,t,n){return new Context(new Stack(e,this.stack,t,n),this.global,this.blocks)},Context.prototype.rebase=function(e){return new Context(new Stack(e),this.global,this.blocks)},Context.prototype.current=function(){return this.stack.head},Context.prototype.getBlock=function(e,t,n){typeof e=="function"&&(e=e(t,n).data.join(""),t.data=[]);var r=this.blocks;if(!r)return;var i=r.length,s;while(i--){s=r[i][e];if(s)return s}},Context.prototype.shiftBlocks=function(e){var t=this.blocks,n;return e?(t?n=t.concat([e]):n=[e],new Context(this.stack,this.global,n)):this},Stub.prototype.flush=function(){var e=this.head;while(e){if(!e.flushable){if(e.error){this.callback(e.error),this.flush=function(){};return}return}this.out+=e.data.join(""),e=e.next,this.head=e}this.callback(null,this.out)},Stream.prototype.flush=function(){var e=this.head;while(e){if(!e.flushable){if(e.error){this.emit("error",e.error),this.flush=function(){};return}return}this.emit("data",e.data.join("")),e=e.next,this.head=e}this.emit("end")},Stream.prototype.emit=function(e,t){if(!this.events)return!1;var n=this.events[e];if(!n)return!1;if(typeof n=="function")n(t);else{var r=n.slice(0);for(var i=0,s=r.length;i0){t.stack.head&&(t.stack.head.$len=o);for(var a=0;a\"\']/),AMP=/&/g,LT=//g,QUOT=/\"/g,SQUOT=/\'/g;dust.escapeHtml=function(e){return typeof e=="string"?HCHARS.test(e)?e.replace(AMP,"&").replace(LT,"<").replace(GT,">").replace(QUOT,""").replace(SQUOT,"'"):e:e};var BS=/\\/g,FS=/\//g,CR=/\r/g,LS=/\u2028/g,PS=/\u2029/g,NL=/\n/g,LF=/\f/g,SQ=/'/g,DQ=/"/g,TB=/\t/g;dust.escapeJs=function(e){return typeof 
e=="string"?e.replace(BS,"\\\\").replace(FS,"\\/").replace(DQ,'\\"').replace(SQ,"\\'").replace(CR,"\\r").replace(LS,"\\u2028").replace(PS,"\\u2029").replace(NL,"\\n").replace(LF,"\\f").replace(TB,"\\t"):e}})(dust),typeof exports!="undefined"&&(typeof process!="undefined"&&require("./server")(dust),module.exports=dust);var dustCompiler=function(e){function t(t){var n={};return e.filterNode(n,t)}function n(t,n){var r=[n[0]];for(var i=1,s=n.length;iu.offset&&(u=c(s),a=[]),a.push(e)}function d(){var e,t,n;n=c(s),e=[],t=v();while(t!==null)e.push(t),t=v();return e!==null&&(e=function(e,t,n,r){return["body"].concat(r)}(n.offset,n.line,n.column,e)),e===null&&(s=c(n)),e}function v(){var e;return e=I(),e===null&&(e=m(),e===null&&(e=x(),e===null&&(e=N(),e===null&&(e=S(),e===null&&(e=B()))))),e}function m(){var e,t,r,i,u,a,f,l,v;o++,l=c(s),v=c(s),e=g();if(e!==null){t=[],r=V();while(r!==null)t.push(r),r=V();t!==null?(r=U(),r!==null?(i=d(),i!==null?(u=E(),u!==null?(a=y(),a=a!==null?a:"",a!==null?(f=function(e,t,n,r,i,s,o){if(!o||r[1].text!==o.text)throw new Error("Expected end tag for "+r[1].text+" but it was not found. 
At line : "+t+", column : "+n);return!0}(s.offset,s.line,s.column,e,i,u,a)?"":null,f!==null?e=[e,t,r,i,u,a,f]:(e=null,s=c(v))):(e=null,s=c(v))):(e=null,s=c(v))):(e=null,s=c(v))):(e=null,s=c(v))):(e=null,s=c(v))}else e=null,s=c(v);e!==null&&(e=function(e,t,n,r,i,s,o){return s.push(["param",["literal","block"],i]),r.push(s),r}(l.offset,l.line,l.column,e[0],e[3],e[4],e[5])),e===null&&(s=c(l));if(e===null){l=c(s),v=c(s),e=g();if(e!==null){t=[],r=V();while(r!==null)t.push(r),r=V();t!==null?(n.charCodeAt(s.offset)===47?(r="/",h(s,1)):(r=null,o===0&&p('"/"')),r!==null?(i=U(),i!==null?e=[e,t,r,i]:(e=null,s=c(v))):(e=null,s=c(v))):(e=null,s=c(v))}else e=null,s=c(v);e!==null&&(e=function(e,t,n,r){return r.push(["bodies"]),r}(l.offset,l.line,l.column,e[0])),e===null&&(s=c(l))}return o--,o===0&&e===null&&p("section"),e}function g(){var e,t,r,i,u,a,f,l;f=c(s),l=c(s),e=R();if(e!==null){/^[#?^<+@%]/.test(n.charAt(s.offset))?(t=n.charAt(s.offset),h(s,1)):(t=null,o===0&&p("[#?^<+@%]"));if(t!==null){r=[],i=V();while(i!==null)r.push(i),i=V();r!==null?(i=C(),i!==null?(u=b(),u!==null?(a=w(),a!==null?e=[e,t,r,i,u,a]:(e=null,s=c(l))):(e=null,s=c(l))):(e=null,s=c(l))):(e=null,s=c(l))}else e=null,s=c(l)}else e=null,s=c(l);return e!==null&&(e=function(e,t,n,r,i,s,o){return[r,i,s,o]}(f.offset,f.line,f.column,e[1],e[3],e[4],e[5])),e===null&&(s=c(f)),e}function y(){var e,t,r,i,u,a,f,l;o++,f=c(s),l=c(s),e=R();if(e!==null){n.charCodeAt(s.offset)===47?(t="/",h(s,1)):(t=null,o===0&&p('"/"'));if(t!==null){r=[],i=V();while(i!==null)r.push(i),i=V();if(r!==null){i=C();if(i!==null){u=[],a=V();while(a!==null)u.push(a),a=V();u!==null?(a=U(),a!==null?e=[e,t,r,i,u,a]:(e=null,s=c(l))):(e=null,s=c(l))}else e=null,s=c(l)}else e=null,s=c(l)}else e=null,s=c(l)}else e=null,s=c(l);return e!==null&&(e=function(e,t,n,r){return r}(f.offset,f.line,f.column,e[3])),e===null&&(s=c(f)),o--,o===0&&e===null&&p("end tag"),e}function b(){var e,t,r,i,u;return 
r=c(s),i=c(s),u=c(s),n.charCodeAt(s.offset)===58?(e=":",h(s,1)):(e=null,o===0&&p('":"')),e!==null?(t=C(),t!==null?e=[e,t]:(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n,r){return r}(i.offset,i.line,i.column,e[1])),e===null&&(s=c(i)),e=e!==null?e:"",e!==null&&(e=function(e,t,n,r){return r?["context",r]:["context"]}(r.offset,r.line,r.column,e)),e===null&&(s=c(r)),e}function w(){var e,t,r,i,u,a,f,l;o++,a=c(s),e=[],f=c(s),l=c(s),r=V();if(r!==null){t=[];while(r!==null)t.push(r),r=V()}else t=null;t!==null?(r=M(),r!==null?(n.charCodeAt(s.offset)===61?(i="=",h(s,1)):(i=null,o===0&&p('"="')),i!==null?(u=k(),u===null&&(u=C(),u===null&&(u=P())),u!==null?t=[t,r,i,u]:(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l)),t!==null&&(t=function(e,t,n,r,i){return["param",["literal",r],i]}(f.offset,f.line,f.column,t[1],t[3])),t===null&&(s=c(f));while(t!==null){e.push(t),f=c(s),l=c(s),r=V();if(r!==null){t=[];while(r!==null)t.push(r),r=V()}else t=null;t!==null?(r=M(),r!==null?(n.charCodeAt(s.offset)===61?(i="=",h(s,1)):(i=null,o===0&&p('"="')),i!==null?(u=k(),u===null&&(u=C(),u===null&&(u=P())),u!==null?t=[t,r,i,u]:(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l)),t!==null&&(t=function(e,t,n,r,i){return["param",["literal",r],i]}(f.offset,f.line,f.column,t[1],t[3])),t===null&&(s=c(f))}return e!==null&&(e=function(e,t,n,r){return["params"].concat(r)}(a.offset,a.line,a.column,e)),e===null&&(s=c(a)),o--,o===0&&e===null&&p("params"),e}function E(){var 
e,t,r,i,u,a,f,l,v;o++,f=c(s),e=[],l=c(s),v=c(s),t=R(),t!==null?(n.charCodeAt(s.offset)===58?(r=":",h(s,1)):(r=null,o===0&&p('":"')),r!==null?(i=M(),i!==null?(u=U(),u!==null?(a=d(),a!==null?t=[t,r,i,u,a]:(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v)),t!==null&&(t=function(e,t,n,r,i){return["param",["literal",r],i]}(l.offset,l.line,l.column,t[2],t[4])),t===null&&(s=c(l));while(t!==null)e.push(t),l=c(s),v=c(s),t=R(),t!==null?(n.charCodeAt(s.offset)===58?(r=":",h(s,1)):(r=null,o===0&&p('":"')),r!==null?(i=M(),i!==null?(u=U(),u!==null?(a=d(),a!==null?t=[t,r,i,u,a]:(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v))):(t=null,s=c(v)),t!==null&&(t=function(e,t,n,r,i){return["param",["literal",r],i]}(l.offset,l.line,l.column,t[2],t[4])),t===null&&(s=c(l));return e!==null&&(e=function(e,t,n,r){return["bodies"].concat(r)}(f.offset,f.line,f.column,e)),e===null&&(s=c(f)),o--,o===0&&e===null&&p("bodies"),e}function S(){var e,t,n,r,i,u;return o++,i=c(s),u=c(s),e=R(),e!==null?(t=C(),t!==null?(n=T(),n!==null?(r=U(),r!==null?e=[e,t,n,r]:(e=null,s=c(u))):(e=null,s=c(u))):(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n,r,i){return["reference",r,i]}(i.offset,i.line,i.column,e[1],e[2])),e===null&&(s=c(i)),o--,o===0&&e===null&&p("reference"),e}function x(){var 
e,t,r,i,u,a,f,l,d,v,m,g;o++,v=c(s),m=c(s),e=R();if(e!==null){n.charCodeAt(s.offset)===62?(t=">",h(s,1)):(t=null,o===0&&p('">"')),t===null&&(n.charCodeAt(s.offset)===43?(t="+",h(s,1)):(t=null,o===0&&p('"+"')));if(t!==null){r=[],i=V();while(i!==null)r.push(i),i=V();if(r!==null){g=c(s),i=M(),i!==null&&(i=function(e,t,n,r){return["literal",r]}(g.offset,g.line,g.column,i)),i===null&&(s=c(g)),i===null&&(i=P());if(i!==null){u=b();if(u!==null){a=w();if(a!==null){f=[],l=V();while(l!==null)f.push(l),l=V();f!==null?(n.charCodeAt(s.offset)===47?(l="/",h(s,1)):(l=null,o===0&&p('"/"')),l!==null?(d=U(),d!==null?e=[e,t,r,i,u,a,f,l,d]:(e=null,s=c(m))):(e=null,s=c(m))):(e=null,s=c(m))}else e=null,s=c(m)}else e=null,s=c(m)}else e=null,s=c(m)}else e=null,s=c(m)}else e=null,s=c(m)}else e=null,s=c(m);return e!==null&&(e=function(e,t,n,r,i,s,o){var u=r===">"?"partial":r;return[u,i,s,o]}(v.offset,v.line,v.column,e[1],e[3],e[4],e[5])),e===null&&(s=c(v)),o--,o===0&&e===null&&p("partial"),e}function T(){var e,t,r,i,u,a;o++,i=c(s),e=[],u=c(s),a=c(s),n.charCodeAt(s.offset)===124?(t="|",h(s,1)):(t=null,o===0&&p('"|"')),t!==null?(r=M(),r!==null?t=[t,r]:(t=null,s=c(a))):(t=null,s=c(a)),t!==null&&(t=function(e,t,n,r){return r}(u.offset,u.line,u.column,t[1])),t===null&&(s=c(u));while(t!==null)e.push(t),u=c(s),a=c(s),n.charCodeAt(s.offset)===124?(t="|",h(s,1)):(t=null,o===0&&p('"|"')),t!==null?(r=M(),r!==null?t=[t,r]:(t=null,s=c(a))):(t=null,s=c(a)),t!==null&&(t=function(e,t,n,r){return r}(u.offset,u.line,u.column,t[1])),t===null&&(s=c(u));return e!==null&&(e=function(e,t,n,r){return["filters"].concat(r)}(i.offset,i.line,i.column,e)),e===null&&(s=c(i)),o--,o===0&&e===null&&p("filters"),e}function N(){var e,t,r,i,u,a;return 
o++,u=c(s),a=c(s),e=R(),e!==null?(n.charCodeAt(s.offset)===126?(t="~",h(s,1)):(t=null,o===0&&p('"~"')),t!==null?(r=M(),r!==null?(i=U(),i!==null?e=[e,t,r,i]:(e=null,s=c(a))):(e=null,s=c(a))):(e=null,s=c(a))):(e=null,s=c(a)),e!==null&&(e=function(e,t,n,r){return["special",r]}(u.offset,u.line,u.column,e[2])),e===null&&(s=c(u)),o--,o===0&&e===null&&p("special"),e}function C(){var e,t;return o++,t=c(s),e=O(),e!==null&&(e=function(e,t,n,r){var i=["path"].concat(r);return i.text=r[1].join("."),i}(t.offset,t.line,t.column,e)),e===null&&(s=c(t)),e===null&&(t=c(s),e=M(),e!==null&&(e=function(e,t,n,r){var i=["key",r];return i.text=r,i}(t.offset,t.line,t.column,e)),e===null&&(s=c(t))),o--,o===0&&e===null&&p("identifier"),e}function k(){var e,t;return o++,t=c(s),e=L(),e===null&&(e=A()),e!==null&&(e=function(e,t,n,r){return["literal",r]}(t.offset,t.line,t.column,e)),e===null&&(s=c(t)),o--,o===0&&e===null&&p("number"),e}function L(){var e,t,r,i,u,a;o++,u=c(s),a=c(s),e=A();if(e!==null){n.charCodeAt(s.offset)===46?(t=".",h(s,1)):(t=null,o===0&&p('"."'));if(t!==null){i=A();if(i!==null){r=[];while(i!==null)r.push(i),i=A()}else r=null;r!==null?e=[e,t,r]:(e=null,s=c(a))}else e=null,s=c(a)}else e=null,s=c(a);return e!==null&&(e=function(e,t,n,r,i){return parseFloat(r+"."+i.join(""))}(u.offset,u.line,u.column,e[0],e[2])),e===null&&(s=c(u)),o--,o===0&&e===null&&p("frac"),e}function A(){var e,t,r;o++,r=c(s),/^[0-9]/.test(n.charAt(s.offset))?(t=n.charAt(s.offset),h(s,1)):(t=null,o===0&&p("[0-9]"));if(t!==null){e=[];while(t!==null)e.push(t),/^[0-9]/.test(n.charAt(s.offset))?(t=n.charAt(s.offset),h(s,1)):(t=null,o===0&&p("[0-9]"))}else e=null;return e!==null&&(e=function(e,t,n,r){return parseInt(r.join(""),10)}(r.offset,r.line,r.column,e)),e===null&&(s=c(r)),o--,o===0&&e===null&&p("integer"),e}function O(){var e,t,r,i,u;o++,i=c(s),u=c(s),e=M(),e=e!==null?e:"";if(e!==null){r=D(),r===null&&(r=_());if(r!==null){t=[];while(r!==null)t.push(r),r=D(),r===null&&(r=_())}else 
t=null;t!==null?e=[e,t]:(e=null,s=c(u))}else e=null,s=c(u);e!==null&&(e=function(e,t,n,r,i){return i=i[0],r&&i?(i.unshift(r),[!1,i]):[!0,i]}(i.offset,i.line,i.column,e[0],e[1])),e===null&&(s=c(i));if(e===null){i=c(s),u=c(s),n.charCodeAt(s.offset)===46?(e=".",h(s,1)):(e=null,o===0&&p('"."'));if(e!==null){t=[],r=D(),r===null&&(r=_());while(r!==null)t.push(r),r=D(),r===null&&(r=_());t!==null?e=[e,t]:(e=null,s=c(u))}else e=null,s=c(u);e!==null&&(e=function(e,t,n,r){return r.length>0?[!0,r[0]]:[!0,[]]}(i.offset,i.line,i.column,e[1])),e===null&&(s=c(i))}return o--,o===0&&e===null&&p("path"),e}function M(){var e,t,r,i,u;o++,i=c(s),u=c(s),/^[a-zA-Z_$]/.test(n.charAt(s.offset))?(e=n.charAt(s.offset),h(s,1)):(e=null,o===0&&p("[a-zA-Z_$]"));if(e!==null){t=[],/^[0-9a-zA-Z_$\-]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p("[0-9a-zA-Z_$\\-]"));while(r!==null)t.push(r),/^[0-9a-zA-Z_$\-]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p("[0-9a-zA-Z_$\\-]"));t!==null?e=[e,t]:(e=null,s=c(u))}else e=null,s=c(u);return e!==null&&(e=function(e,t,n,r,i){return r+i.join("")}(i.offset,i.line,i.column,e[0],e[1])),e===null&&(s=c(i)),o--,o===0&&e===null&&p("key"),e}function _(){var e,t,r,i,u,a,f,l;o++,i=c(s),u=c(s),a=c(s),f=c(s),e=z();if(e!==null){l=c(s),/^[0-9]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p("[0-9]"));if(r!==null){t=[];while(r!==null)t.push(r),/^[0-9]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p("[0-9]"))}else t=null;t!==null&&(t=function(e,t,n,r){return r.join("")}(l.offset,l.line,l.column,t)),t===null&&(s=c(l)),t===null&&(t=C()),t!==null?(r=W(),r!==null?e=[e,t,r]:(e=null,s=c(f))):(e=null,s=c(f))}else e=null,s=c(f);return e!==null&&(e=function(e,t,n,r){return r}(a.offset,a.line,a.column,e[1])),e===null&&(s=c(a)),e!==null?(t=D(),t=t!==null?t:"",t!==null?e=[e,t]:(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n,r,i){return 
i?i.unshift(r):i=[r],i}(i.offset,i.line,i.column,e[0],e[1])),e===null&&(s=c(i)),o--,o===0&&e===null&&p("array"),e}function D(){var e,t,r,i,u,a,f;o++,i=c(s),u=c(s),a=c(s),f=c(s),n.charCodeAt(s.offset)===46?(t=".",h(s,1)):(t=null,o===0&&p('"."')),t!==null?(r=M(),r!==null?t=[t,r]:(t=null,s=c(f))):(t=null,s=c(f)),t!==null&&(t=function(e,t,n,r){return r}(a.offset,a.line,a.column,t[1])),t===null&&(s=c(a));if(t!==null){e=[];while(t!==null)e.push(t),a=c(s),f=c(s),n.charCodeAt(s.offset)===46?(t=".",h(s,1)):(t=null,o===0&&p('"."')),t!==null?(r=M(),r!==null?t=[t,r]:(t=null,s=c(f))):(t=null,s=c(f)),t!==null&&(t=function(e,t,n,r){return r}(a.offset,a.line,a.column,t[1])),t===null&&(s=c(a))}else e=null;return e!==null?(t=_(),t=t!==null?t:"",t!==null?e=[e,t]:(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n,r,i){return i?r.concat(i):r}(i.offset,i.line,i.column,e[0],e[1])),e===null&&(s=c(i)),o--,o===0&&e===null&&p("array_part"),e}function P(){var e,t,r,i,u;o++,i=c(s),u=c(s),n.charCodeAt(s.offset)===34?(e='"',h(s,1)):(e=null,o===0&&p('"\\""')),e!==null?(n.charCodeAt(s.offset)===34?(t='"',h(s,1)):(t=null,o===0&&p('"\\""')),t!==null?e=[e,t]:(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n){return["literal",""]}(i.offset,i.line,i.column)),e===null&&(s=c(i));if(e===null){i=c(s),u=c(s),n.charCodeAt(s.offset)===34?(e='"',h(s,1)):(e=null,o===0&&p('"\\""')),e!==null?(t=j(),t!==null?(n.charCodeAt(s.offset)===34?(r='"',h(s,1)):(r=null,o===0&&p('"\\""')),r!==null?e=[e,t,r]:(e=null,s=c(u))):(e=null,s=c(u))):(e=null,s=c(u)),e!==null&&(e=function(e,t,n,r){return["literal",r]}(i.offset,i.line,i.column,e[1])),e===null&&(s=c(i));if(e===null){i=c(s),u=c(s),n.charCodeAt(s.offset)===34?(e='"',h(s,1)):(e=null,o===0&&p('"\\""'));if(e!==null){r=H();if(r!==null){t=[];while(r!==null)t.push(r),r=H()}else t=null;t!==null?(n.charCodeAt(s.offset)===34?(r='"',h(s,1)):(r=null,o===0&&p('"\\""')),r!==null?e=[e,t,r]:(e=null,s=c(u))):(e=null,s=c(u))}else 
e=null,s=c(u);e!==null&&(e=function(e,t,n,r){return["body"].concat(r)}(i.offset,i.line,i.column,e[1])),e===null&&(s=c(i))}}return o--,o===0&&e===null&&p("inline"),e}function H(){var e,t;return e=N(),e===null&&(e=S(),e===null&&(t=c(s),e=j(),e!==null&&(e=function(e,t,n,r){return["buffer",r]}(t.offset,t.line,t.column,e)),e===null&&(s=c(t)))),e}function B(){var e,t,r,i,u,a,f,l,d;o++,a=c(s),f=c(s),e=X();if(e!==null){t=[],r=V();while(r!==null)t.push(r),r=V();t!==null?e=[e,t]:(e=null,s=c(f))}else e=null,s=c(f);e!==null&&(e=function(e,t,n,r,i){return["format",r,i.join("")]}(a.offset,a.line,a.column,e[0],e[1])),e===null&&(s=c(a));if(e===null){a=c(s),f=c(s),l=c(s),d=c(s),o++,t=q(),o--,t===null?t="":(t=null,s=c(d)),t!==null?(d=c(s),o++,r=I(),o--,r===null?r="":(r=null,s=c(d)),r!==null?(d=c(s),o++,i=X(),o--,i===null?i="":(i=null,s=c(d)),i!==null?(n.length>s.offset?(u=n.charAt(s.offset),h(s,1)):(u=null,o===0&&p("any character")),u!==null?t=[t,r,i,u]:(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l)),t!==null&&(t=function(e,t,n,r){return r}(f.offset,f.line,f.column,t[3])),t===null&&(s=c(f));if(t!==null){e=[];while(t!==null)e.push(t),f=c(s),l=c(s),d=c(s),o++,t=q(),o--,t===null?t="":(t=null,s=c(d)),t!==null?(d=c(s),o++,r=I(),o--,r===null?r="":(r=null,s=c(d)),r!==null?(d=c(s),o++,i=X(),o--,i===null?i="":(i=null,s=c(d)),i!==null?(n.length>s.offset?(u=n.charAt(s.offset),h(s,1)):(u=null,o===0&&p("any character")),u!==null?t=[t,r,i,u]:(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l))):(t=null,s=c(l)),t!==null&&(t=function(e,t,n,r){return r}(f.offset,f.line,f.column,t[3])),t===null&&(s=c(f))}else e=null;e!==null&&(e=function(e,t,n,r){return["buffer",r.join("")]}(a.offset,a.line,a.column,e)),e===null&&(s=c(a))}return o--,o===0&&e===null&&p("buffer"),e}function j(){var 
e,t,r,i,u,a,f;o++,i=c(s),u=c(s),a=c(s),f=c(s),o++,t=q(),o--,t===null?t="":(t=null,s=c(f)),t!==null?(r=F(),r===null&&(/^[^"]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p('[^"]'))),r!==null?t=[t,r]:(t=null,s=c(a))):(t=null,s=c(a)),t!==null&&(t=function(e,t,n,r){return r}(u.offset,u.line,u.column,t[1])),t===null&&(s=c(u));if(t!==null){e=[];while(t!==null)e.push(t),u=c(s),a=c(s),f=c(s),o++,t=q(),o--,t===null?t="":(t=null,s=c(f)),t!==null?(r=F(),r===null&&(/^[^"]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p('[^"]'))),r!==null?t=[t,r]:(t=null,s=c(a))):(t=null,s=c(a)),t!==null&&(t=function(e,t,n,r){return r}(u.offset,u.line,u.column,t[1])),t===null&&(s=c(u))}else e=null;return e!==null&&(e=function(e,t,n,r){return r.join("")}(i.offset,i.line,i.column,e)),e===null&&(s=c(i)),o--,o===0&&e===null&&p("literal"),e}function F(){var e,t;return t=c(s),n.substr(s.offset,2)==='\\"'?(e='\\"',h(s,2)):(e=null,o===0&&p('"\\\\\\""')),e!==null&&(e=function(e,t,n){return'"'}(t.offset,t.line,t.column)),e===null&&(s=c(t)),e}function I(){var e,t,r,i,u,a,f,l,d;o++,u=c(s),a=c(s),n.substr(s.offset,2)==="{!"?(e="{!",h(s,2)):(e=null,o===0&&p('"{!"'));if(e!==null){t=[],f=c(s),l=c(s),d=c(s),o++,n.substr(s.offset,2)==="!}"?(r="!}",h(s,2)):(r=null,o===0&&p('"!}"')),o--,r===null?r="":(r=null,s=c(d)),r!==null?(n.length>s.offset?(i=n.charAt(s.offset),h(s,1)):(i=null,o===0&&p("any character")),i!==null?r=[r,i]:(r=null,s=c(l))):(r=null,s=c(l)),r!==null&&(r=function(e,t,n,r){return r}(f.offset,f.line,f.column,r[1])),r===null&&(s=c(f));while(r!==null)t.push(r),f=c(s),l=c(s),d=c(s),o++,n.substr(s.offset,2)==="!}"?(r="!}",h(s,2)):(r=null,o===0&&p('"!}"')),o--,r===null?r="":(r=null,s=c(d)),r!==null?(n.length>s.offset?(i=n.charAt(s.offset),h(s,1)):(i=null,o===0&&p("any character")),i!==null?r=[r,i]:(r=null,s=c(l))):(r=null,s=c(l)),r!==null&&(r=function(e,t,n,r){return 
r}(f.offset,f.line,f.column,r[1])),r===null&&(s=c(f));t!==null?(n.substr(s.offset,2)==="!}"?(r="!}",h(s,2)):(r=null,o===0&&p('"!}"')),r!==null?e=[e,t,r]:(e=null,s=c(a))):(e=null,s=c(a))}else e=null,s=c(a);return e!==null&&(e=function(e,t,n,r){return["comment",r.join("")]}(u.offset,u.line,u.column,e[1])),e===null&&(s=c(u)),o--,o===0&&e===null&&p("comment"),e}function q(){var e,t,r,i,u,a,f,l,d,v,m;d=c(s),e=R();if(e!==null){t=[],r=V();while(r!==null)t.push(r),r=V();if(t!==null){/^[#?^><+%:@\/~%]/.test(n.charAt(s.offset))?(r=n.charAt(s.offset),h(s,1)):(r=null,o===0&&p("[#?^><+%:@\\/~%]"));if(r!==null){i=[],u=V();while(u!==null)i.push(u),u=V();if(i!==null){v=c(s),m=c(s),o++,a=U(),o--,a===null?a="":(a=null,s=c(m)),a!==null?(m=c(s),o++,f=X(),o--,f===null?f="":(f=null,s=c(m)),f!==null?(n.length>s.offset?(l=n.charAt(s.offset),h(s,1)):(l=null,o===0&&p("any character")),l!==null?a=[a,f,l]:(a=null,s=c(v))):(a=null,s=c(v))):(a=null,s=c(v));if(a!==null){u=[];while(a!==null)u.push(a),v=c(s),m=c(s),o++,a=U(),o--,a===null?a="":(a=null,s=c(m)),a!==null?(m=c(s),o++,f=X(),o--,f===null?f="":(f=null,s=c(m)),f!==null?(n.length>s.offset?(l=n.charAt(s.offset),h(s,1)):(l=null,o===0&&p("any character")),l!==null?a=[a,f,l]:(a=null,s=c(v))):(a=null,s=c(v))):(a=null,s=c(v))}else u=null;if(u!==null){a=[],f=V();while(f!==null)a.push(f),f=V();a!==null?(f=U(),f!==null?e=[e,t,r,i,u,a,f]:(e=null,s=c(d))):(e=null,s=c(d))}else e=null,s=c(d)}else e=null,s=c(d)}else e=null,s=c(d)}else e=null,s=c(d)}else e=null,s=c(d);return e===null&&(e=S()),e}function R(){var e;return n.charCodeAt(s.offset)===123?(e="{",h(s,1)):(e=null,o===0&&p('"{"')),e}function U(){var e;return n.charCodeAt(s.offset)===125?(e="}",h(s,1)):(e=null,o===0&&p('"}"')),e}function z(){var e;return n.charCodeAt(s.offset)===91?(e="[",h(s,1)):(e=null,o===0&&p('"["')),e}function W(){var e;return n.charCodeAt(s.offset)===93?(e="]",h(s,1)):(e=null,o===0&&p('"]"')),e}function X(){var e;return 
n.charCodeAt(s.offset)===10?(e="\n",h(s,1)):(e=null,o===0&&p('"\\n"')),e===null&&(n.substr(s.offset,2)==="\r\n"?(e="\r\n",h(s,2)):(e=null,o===0&&p('"\\r\\n"')),e===null&&(n.charCodeAt(s.offset)===13?(e="\r",h(s,1)):(e=null,o===0&&p('"\\r"')),e===null&&(n.charCodeAt(s.offset)===8232?(e="\u2028",h(s,1)):(e=null,o===0&&p('"\\u2028"')),e===null&&(n.charCodeAt(s.offset)===8233?(e="\u2029",h(s,1)):(e=null,o===0&&p('"\\u2029"')))))),e}function V(){var e;return/^[\t\x0B\f \xA0\uFEFF]/.test(n.charAt(s.offset))?(e=n.charAt(s.offset),h(s,1)):(e=null,o===0&&p("[\\t\\x0B\\f \\xA0\\uFEFF]")),e===null&&(e=X()),e}function $(e){e.sort();var t=null,n=[];for(var r=0;ru.offset?s:u;throw new t.SyntaxError($(a),Q,K,G.line,G.column)}return J},toSource:function(){return this._source}};return n.SyntaxError=function(t,n,r,i,s){function o(t,n){var r,i;switch(t.length){case 0:r="end of input";break;case 1:r=t[0];break;default:r=t.slice(0,t.length-1).join(", ")+" or "+t[t.length-1]}return i=n?e(n):"end of input","Expected "+r+" but "+i+" found."}this.name="SyntaxError",this.expected=t,this.found=n,this.message=o(t,n),this.offset=r,this.line=i,this.column=s},n.SyntaxError.prototype=Error.prototype,n}();e.parse=t.parse}(typeof exports!="undefined"?exports:getGlobal()) \ No newline at end of file diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js new file mode 100644 index 00000000000..1c8cc173877 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dust-helpers-1.1.1.min.js @@ -0,0 +1,8 @@ +(function(k){function n(b){b=b.f();return"object"===typeof b&&!0===b.h}function p(b,c){return"function"===typeof c?c.toString():c}function l(b,c,d,a,e){a=a||{};var m=d.a,g,f,l=a.d||"";if("undefined"!==typeof a.key)g=k.b.c(a.key,b,c);else if(n(c))g=c.f().i,c.f().g&&(e=function(){return!1});else return h.log("No key specified for filter in:"+l+" helper 
"),b;f=k.b.c(a.value,b,c);if(e(q(f,a.type,c),q(g,a.type,c))){n(c)&&(c.f().g=!0);if(m)return b.e(m,c);h.log("Missing body block in the "+l+" helper ")}else if(d["else"])return b.e(d["else"], +c);return b}function q(b,c,d){if(b)switch(c||typeof b){case "number":return+b;case "string":return String(b);case "boolean":return Boolean("false"===b?!1:b);case "date":return new Date(b);case "context":return d.get(b)}return b}var h="undefined"!==typeof console?console:{log:function(){}};k.b={tap:function(b,c,d){var a=b;"function"===typeof b&&(!0===b.l?a=b():(a="",c.c(function(b){a+=b;return""}).e(b,d).p(),""===a&&(a=!1)));return a},sep:function(b,c,d){return c.stack.index===c.stack.m-1?b:d.a?d.a(b, +c):b},idx:function(b,c,d){return d.a?d.a(b,c.push(c.stack.index)):b},contextDump:function(b,c,d,a){a=a||{};d=a.o||"output";a=a.key||"current";d=k.b.c(d,b,c);a=k.b.c(a,b,c);c="full"===a?JSON.stringify(c.stack,p,2):JSON.stringify(c.stack.head,p,2);return"console"===d?(h.log(c),b):b.write(c)},"if":function(b,c,d,a){var e=d.a,m=d["else"];if(a&&a.j){a=a.j;a=k.b.c(a,b,c);if(eval(a)){if(e)return b.e(d.a,c);h.log("Missing body block in the if helper!");return b}if(m)return b.e(d["else"],c)}else h.log("No condition given in the if helper!"); +return b},math:function(b,c,d,a){if(a&&"undefined"!==typeof a.key&&a.method){var e=a.key,m=a.method,g=a.n;a=a.round;var f=null,e=k.b.c(e,b,c),g=k.b.c(g,b,c);switch(m){case "mod":0!==g&&-0!==g||h.log("operand for divide operation is 0/-0: expect Nan!");f=parseFloat(e)%parseFloat(g);break;case "add":f=parseFloat(e)+parseFloat(g);break;case "subtract":f=parseFloat(e)-parseFloat(g);break;case "multiply":f=parseFloat(e)*parseFloat(g);break;case "divide":0!==g&&-0!==g||h.log("operand for divide operation is 0/-0: expect Nan/Infinity!"); +f=parseFloat(e)/parseFloat(g);break;case "ceil":f=Math.ceil(parseFloat(e));break;case "floor":f=Math.floor(parseFloat(e));break;case "round":f=Math.round(parseFloat(e));break;case 
"abs":f=Math.abs(parseFloat(e));break;default:h.log("method passed is not supported")}if(null!==f)return a&&(f=Math.round(f)),d&&d.a?b.e(d.a,c.push({h:!0,g:!1,i:f})):b.write(f)}else h.log("Key is a required parameter for math helper along with method/operand!");return b},select:function(b,c,d,a){var e=d.a;if(a&&"undefined"!== +typeof a.key){a=k.b.c(a.key,b,c);if(e)return b.e(d.a,c.push({h:!0,g:!1,i:a}));h.log("Missing body block in the select helper ")}else h.log("No key given in the select helper!");return b},eq:function(b,c,d,a){a&&(a.d="eq");return l(b,c,d,a,function(a,b){return b===a})},ne:function(b,c,d,a){return a?(a.d="ne",l(b,c,d,a,function(a,b){return b!==a})):b},lt:function(b,c,d,a){if(a)return a.d="lt",l(b,c,d,a,function(a,b){return ba})):b},gte:function(b,c,d,a){return a?(a.d="gte",l(b,c,d,a,function(a,b){return b>=a})):b},"default":function(b,c,d,a){a&&(a.d="default");return l(b,c,d,a,function(){return!0})},size:function(b,c,d,a){c=0;var e;a=a||{};if((a=a.key)&&!0!==a)if(k.isArray(a))c=a.length;else if(!isNaN(parseFloat(a))&&isFinite(a))c=a;else if("object"===typeof a)for(e in c=0,a)Object.hasOwnProperty.call(a,e)&&c++;else c=(a+"").length;else c= +0;return b.write(c)}}})("undefined"!==typeof exports?module.k=require("dustjs-linkedin"):dust); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java index 6388bdd9e7a..80e180b4d8f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestHDFSFileContextMainOperations.java @@ -19,8 +19,10 @@ package org.apache.hadoop.fs; import static org.apache.hadoop.fs.FileContextTestHelper.exists; +import static org.junit.Assert.fail; import java.io.IOException; +import java.net.URI; import 
java.net.URISyntaxException; import javax.security.auth.login.LoginException; @@ -55,7 +57,8 @@ public class TestHDFSFileContextMainOperations extends LoginException, URISyntaxException { cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(2).build(); cluster.waitClusterUp(); - fc = FileContext.getFileContext(cluster.getURI(0), CONF); + URI uri0 = cluster.getURI(0); + fc = FileContext.getFileContext(uri0, CONF); defaultWorkingDirectory = fc.makeQualified( new Path("/user/" + UserGroupInformation.getCurrentUser().getShortUserName())); fc.mkdir(defaultWorkingDirectory, FileContext.DEFAULT_PERM, true); @@ -77,7 +80,10 @@ public class TestHDFSFileContextMainOperations extends @AfterClass public static void ClusterShutdownAtEnd() throws Exception { - cluster.shutdown(); + if (cluster != null) { + cluster.shutdown(); + cluster = null; + } } @Override @@ -111,7 +117,7 @@ public class TestHDFSFileContextMainOperations extends @Test public void testOldRenameWithQuota() throws Exception { - DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem fs = cluster.getFileSystem(); Path src1 = getTestRootPath(fc, "test/testOldRenameWithQuota/srcdir/src1"); Path src2 = getTestRootPath(fc, "test/testOldRenameWithQuota/srcdir/src2"); Path dst1 = getTestRootPath(fc, "test/testOldRenameWithQuota/dstdir/dst1"); @@ -146,7 +152,7 @@ public class TestHDFSFileContextMainOperations extends @Test public void testRenameWithQuota() throws Exception { - DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem fs = cluster.getFileSystem(); Path src1 = getTestRootPath(fc, "test/testRenameWithQuota/srcdir/src1"); Path src2 = getTestRootPath(fc, "test/testRenameWithQuota/srcdir/src2"); Path dst1 = getTestRootPath(fc, "test/testRenameWithQuota/dstdir/dst1"); @@ -210,7 +216,7 @@ public class TestHDFSFileContextMainOperations extends */ @Test public void testEditsLogOldRename() throws Exception { - 
DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem fs = cluster.getFileSystem(); Path src1 = getTestRootPath(fc, "testEditsLogOldRename/srcdir/src1"); Path dst1 = getTestRootPath(fc, "testEditsLogOldRename/dstdir/dst1"); createFile(src1); @@ -226,7 +232,7 @@ public class TestHDFSFileContextMainOperations extends // Restart the cluster and ensure the above operations can be // loaded from the edits log restartCluster(); - fs = (DistributedFileSystem)cluster.getFileSystem(); + fs = cluster.getFileSystem(); src1 = getTestRootPath(fc, "testEditsLogOldRename/srcdir/src1"); dst1 = getTestRootPath(fc, "testEditsLogOldRename/dstdir/dst1"); Assert.assertFalse(fs.exists(src1)); // ensure src1 is already renamed @@ -239,7 +245,7 @@ public class TestHDFSFileContextMainOperations extends */ @Test public void testEditsLogRename() throws Exception { - DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem fs = cluster.getFileSystem(); Path src1 = getTestRootPath(fc, "testEditsLogRename/srcdir/src1"); Path dst1 = getTestRootPath(fc, "testEditsLogRename/dstdir/dst1"); createFile(src1); @@ -255,7 +261,7 @@ public class TestHDFSFileContextMainOperations extends // Restart the cluster and ensure the above operations can be // loaded from the edits log restartCluster(); - fs = (DistributedFileSystem)cluster.getFileSystem(); + fs = cluster.getFileSystem(); src1 = getTestRootPath(fc, "testEditsLogRename/srcdir/src1"); dst1 = getTestRootPath(fc, "testEditsLogRename/dstdir/dst1"); Assert.assertFalse(fs.exists(src1)); // ensure src1 is already renamed @@ -279,7 +285,7 @@ public class TestHDFSFileContextMainOperations extends private void oldRename(Path src, Path dst, boolean renameSucceeds, boolean exception) throws Exception { - DistributedFileSystem fs = (DistributedFileSystem) cluster.getFileSystem(); + DistributedFileSystem fs = cluster.getFileSystem(); try { Assert.assertEquals(renameSucceeds, 
fs.rename(src, dst)); } catch (Exception ex) { @@ -301,4 +307,23 @@ public class TestHDFSFileContextMainOperations extends Assert.assertEquals(renameSucceeds, !exists(fc, src)); Assert.assertEquals((dstExists||renameSucceeds), exists(fc, dst)); } + + @Override + protected boolean listCorruptedBlocksSupported() { + return true; + } + + @Test + public void testCrossFileSystemRename() throws IOException { + try { + fc.rename( + new Path("hdfs://127.0.0.1/aaa/bbb/Foo"), + new Path("file://aaa/bbb/Moo"), + Options.Rename.OVERWRITE); + fail("IOException expected."); + } catch (IOException ioe) { + // okay + } + } + } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java index 2408b4efb8f..442cc63aaee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java @@ -23,12 +23,15 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.io.ByteArrayOutputStream; import java.io.FileNotFoundException; import java.io.IOException; +import java.io.PrintStream; import java.security.PrivilegedAction; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; @@ -921,4 +924,29 @@ public class TestSnapshotDeletion { subFile1Status = hdfs.getFileStatus(subFile1SCopy); assertEquals(REPLICATION_1, subFile1Status.getReplication()); } + + @Test + public void testDeleteSnapshotCommandWithIllegalArguments() throws Exception { + 
ByteArrayOutputStream out = new ByteArrayOutputStream(); + PrintStream psOut = new PrintStream(out); + System.setOut(psOut); + System.setErr(psOut); + FsShell shell = new FsShell(); + shell.setConf(conf); + + String[] argv1 = {"-deleteSnapshot", "/tmp"}; + int val = shell.run(argv1); + assertTrue(val == -1); + assertTrue(out.toString().contains( + argv1[0] + ": Incorrect number of arguments.")); + out.reset(); + + String[] argv2 = {"-deleteSnapshot", "/tmp", "s1", "s2"}; + val = shell.run(argv2); + assertTrue(val == -1); + assertTrue(out.toString().contains( + argv2[0] + ": Incorrect number of arguments.")); + psOut.close(); + out.close(); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotRename.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotRename.java index 386563bea1f..58fa1ffc407 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotRename.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotRename.java @@ -22,10 +22,13 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; @@ -226,4 +229,29 @@ public class TestSnapshotRename { } } } + + @Test + public void testRenameSnapshotCommandWithIllegalArguments() throws Exception { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + PrintStream psOut = new PrintStream(out); + System.setOut(psOut); + System.setErr(psOut); + FsShell shell = 
new FsShell(); + shell.setConf(conf); + + String[] argv1 = {"-renameSnapshot", "/tmp", "s1"}; + int val = shell.run(argv1); + assertTrue(val == -1); + assertTrue(out.toString().contains( + argv1[0] + ": Incorrect number of arguments.")); + out.reset(); + + String[] argv2 = {"-renameSnapshot", "/tmp", "s1", "s2", "s3"}; + val = shell.run(argv2); + assertTrue(val == -1); + assertTrue(out.toString().contains( + argv2[0] + ": Incorrect number of arguments.")); + psOut.close(); + out.close(); + } }