HADOOP-12974. Create a CachingGetSpaceUsed implementation that uses df. Contributed by Elliott Clark.

Wei-Chiu Chuang 2016-09-30 12:58:37 -07:00
parent 7fad1221d6
commit 57aec2b46b
3 changed files with 126 additions and 5 deletions

org/apache/hadoop/fs/DFCachingGetSpaceUsed.java

@@ -0,0 +1,48 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import java.io.IOException;

/**
 * Fast but inaccurate class to tell how much space HDFS is using.
 * This class makes the assumption that the entire mount is used for
 * HDFS and that no two HDFS data dirs are on the same disk.
 *
 * To use it, set fs.getspaceused.classname
 * to org.apache.hadoop.fs.DFCachingGetSpaceUsed in your core-site.xml.
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class DFCachingGetSpaceUsed extends CachingGetSpaceUsed {
  private final DF df;

  public DFCachingGetSpaceUsed(Builder builder) throws IOException {
    super(builder);
    this.df = new DF(builder.getPath(), builder.getInterval());
  }

  @Override
  protected void refresh() {
    // DF re-runs the underlying 'df' command only when its cached
    // result is older than the configured interval.
    this.used.set(df.getUsed());
  }
}
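The javadoc above describes selecting this implementation via fs.getspaceused.classname in core-site.xml. Below is a minimal sketch of the same selection done programmatically, assuming GetSpaceUsed.Builder exposes a setConf(Configuration) hook and that build() falls back to that key when no class is set explicitly; the example class name and the /data/dn path are hypothetical.

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CachingGetSpaceUsed;
import org.apache.hadoop.fs.GetSpaceUsed;

public class DfSpaceUsedExample {  // hypothetical example class
  public static void main(String[] args) throws Exception {
    // Programmatic equivalent of the core-site.xml snippet:
    //   <property>
    //     <name>fs.getspaceused.classname</name>
    //     <value>org.apache.hadoop.fs.DFCachingGetSpaceUsed</value>
    //   </property>
    Configuration conf = new Configuration();
    conf.set("fs.getspaceused.classname",
        "org.apache.hadoop.fs.DFCachingGetSpaceUsed");

    GetSpaceUsed spaceUsed = new CachingGetSpaceUsed.Builder()
        .setPath(new File("/data/dn"))  // hypothetical DataNode data dir
        .setConf(conf)                  // assumed Builder hook
        .build();
    System.out.println("bytes used: " + spaceUsed.getUsed());
  }
}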

org/apache/hadoop/fs/DU.java

@@ -31,12 +31,13 @@
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class DU extends CachingGetSpaceUsed {
-  private DUShell duShell;
+  private final DUShell duShell;

   @VisibleForTesting
   public DU(File path, long interval, long jitter, long initialUsed)
       throws IOException {
     super(path, interval, jitter, initialUsed);
+    this.duShell = new DUShell();
   }

   public DU(CachingGetSpaceUsed.Builder builder) throws IOException {
@@ -48,9 +49,6 @@ public DU(CachingGetSpaceUsed.Builder builder) throws IOException {
   @Override
   protected synchronized void refresh() {
-    if (duShell == null) {
-      duShell = new DUShell();
-    }
     try {
       duShell.startRefresh();
     } catch (IOException ioe) {
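The net effect of this hunk: duShell is now final and constructed eagerly, so refresh() drops the lazy-init null check and can never observe a half-initialized field. For that to compile, every constructor must assign the field; a minimal sketch of how the Builder-based constructor can satisfy this by delegating (the body is not shown in the hunk above, and getJitter()/getInitialUsed() are assumed by analogy with the getPath()/getInterval() getters used elsewhere in this commit):

// Sketch only: delegating keeps the final duShell field assigned
// exactly once on every construction path.
public DU(CachingGetSpaceUsed.Builder builder) throws IOException {
  this(builder.getPath(),
      builder.getInterval(),
      builder.getJitter(),       // assumed getter
      builder.getInitialUsed()); // assumed getter
}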

org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java

@@ -0,0 +1,75 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

import static org.junit.Assert.assertTrue;

/**
 * Test to make sure that the df-based DFCachingGetSpaceUsed
 * implementation can build and run.
 */
public class TestDFCachingGetSpaceUsed {
  private static final File DF_DIR = GenericTestUtils.getTestDir("testdfspace");
  public static final int FILE_SIZE = 1024;

  @Before
  public void setUp() {
    FileUtil.fullyDelete(DF_DIR);
    assertTrue(DF_DIR.mkdirs());
  }

  @After
  public void tearDown() throws IOException {
    FileUtil.fullyDelete(DF_DIR);
  }

  @Test
  public void testCanBuildRun() throws Exception {
    File file = writeFile("testCanBuild");

    GetSpaceUsed instance = new CachingGetSpaceUsed.Builder()
        .setPath(file)
        .setInterval(50060)
        .setKlass(DFCachingGetSpaceUsed.class)
        .build();
    assertTrue(instance instanceof DFCachingGetSpaceUsed);
    // df reports usage for the whole mount, which holds at least the
    // file just written, so used space should be near FILE_SIZE or more.
    assertTrue(instance.getUsed() >= FILE_SIZE - 20);
    ((DFCachingGetSpaceUsed) instance).close();
  }

  private File writeFile(String fileName) throws IOException {
    File f = new File(DF_DIR, fileName);
    assertTrue(f.createNewFile());
    RandomAccessFile randomAccessFile = new RandomAccessFile(f, "rws");
    randomAccessFile.writeUTF(RandomStringUtils.randomAlphabetic(FILE_SIZE));
    randomAccessFile.getFD().sync();
    randomAccessFile.close();
    return f;
  }
}