Do not create directories if repository is readonly (#26909)

For FsBlobStore and HdfsBlobStore: if the repository is read-only, the blob store should respect the readonly setting and not create directories when they don't exist.

Closes #21495
kel 2017-11-03 07:10:50 -05:00 committed by Yannick Welsch
parent 9abc26ee92
commit 0f21262b36
7 changed files with 118 additions and 27 deletions
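The change is the same guard in both stores: consult the `readonly` flag before touching the filesystem, both when the store is constructed and when a blob container path is built. As a rough standalone sketch of the pattern (the class below is illustrative only, not an actual Elasticsearch type):

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Illustrative sketch of the guard added to FsBlobStore and HdfsBlobStore:
// a read-only store must never create directories as a side effect.
class ReadOnlyAwareStore {

    private final Path root;
    private final boolean readOnly;

    ReadOnlyAwareStore(Path root, boolean readOnly) throws IOException {
        this.root = root;
        this.readOnly = readOnly;
        if (!readOnly) {
            // Only a writable repository may create its directory tree.
            Files.createDirectories(root);
        }
    }

    Path resolveContainerPath(String name) throws IOException {
        Path p = root.resolve(name);
        if (!readOnly) {
            Files.createDirectories(p);
        }
        // The resolved path is returned either way; reads against an
        // existing repository do not need the directory to be created.
        return p;
    }
}
```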

FsBlobStore.java

@@ -39,10 +39,15 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {

    private final int bufferSizeInBytes;

    private final boolean readOnly;

    public FsBlobStore(Settings settings, Path path) throws IOException {
        super(settings);
        this.path = path;
        this.readOnly = settings.getAsBoolean("readonly", false);
        if (!this.readOnly) {
            Files.createDirectories(path);
        }
        this.bufferSizeInBytes = (int) settings.getAsBytesSize("repositories.fs.buffer_size",
            new ByteSizeValue(100, ByteSizeUnit.KB)).getBytes();
    }
@@ -80,7 +85,9 @@ public class FsBlobStore extends AbstractComponent implements BlobStore {

    private synchronized Path buildAndCreate(BlobPath path) throws IOException {
        Path f = buildPath(path);
        if (!readOnly) {
            Files.createDirectories(f);
        }
        return f;
    }
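Note that `buildAndCreate` still resolves and returns the path when the store is read-only; it only skips the `Files.createDirectories` side effect. Reads against an existing repository therefore keep working, since they resolve blobs under directories that are already present.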

FsBlobStoreTests.java

@@ -20,12 +20,14 @@ package org.elasticsearch.common.blobstore;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.blobstore.fs.FsBlobStore;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.repositories.ESBlobStoreTestCase;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
@@ -35,4 +37,39 @@ public class FsBlobStoreTests extends ESBlobStoreTestCase {
        Settings settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
        return new FsBlobStore(settings, tempDir);
    }

    public void testReadOnly() throws Exception {
        Settings settings = Settings.builder().put("readonly", true).build();
        Path tempDir = createTempDir();
        Path path = tempDir.resolve("bar");

        try (FsBlobStore store = new FsBlobStore(settings, path)) {
            assertFalse(Files.exists(path));
            BlobPath blobPath = BlobPath.cleanPath().add("foo");
            store.blobContainer(blobPath);
            Path storePath = store.path();
            for (String d : blobPath) {
                storePath = storePath.resolve(d);
            }
            assertFalse(Files.exists(storePath));
        }

        settings = randomBoolean() ? Settings.EMPTY : Settings.builder().put("readonly", false).build();
        try (FsBlobStore store = new FsBlobStore(settings, path)) {
            assertTrue(Files.exists(path));
            BlobPath blobPath = BlobPath.cleanPath().add("foo");
            BlobContainer container = store.blobContainer(blobPath);
            Path storePath = store.path();
            for (String d : blobPath) {
                storePath = storePath.resolve(d);
            }
            assertTrue(Files.exists(storePath));
            assertTrue(Files.isDirectory(storePath));

            byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
            writeBlob(container, "test", new BytesArray(data));
            assertArrayEquals(readBlobFully(container, "test", data.length), data);
            assertTrue(container.blobExists("test"));
        }
    }
}

HdfsBlobStore.java

@@ -39,19 +39,23 @@ final class HdfsBlobStore implements BlobStore {

    private final FileContext fileContext;
    private final HdfsSecurityContext securityContext;
    private final int bufferSize;
    private final boolean readOnly;
    private volatile boolean closed;

    HdfsBlobStore(FileContext fileContext, String path, int bufferSize) throws IOException {
    HdfsBlobStore(FileContext fileContext, String path, int bufferSize, boolean readOnly) throws IOException {
        this.fileContext = fileContext;
        this.securityContext = new HdfsSecurityContext(fileContext.getUgi());
        this.bufferSize = bufferSize;
        this.root = execute(fileContext1 -> fileContext1.makeQualified(new Path(path)));
        this.readOnly = readOnly;
        if (!readOnly) {
            try {
                mkdirs(root);
            } catch (FileAlreadyExistsException ok) {
                // behaves like Files.createDirectories
            }
        }
    }

    private void mkdirs(Path path) throws IOException {
        execute((Operation<Void>) fileContext -> {
@@ -80,6 +84,7 @@ final class HdfsBlobStore implements BlobStore {

    private Path buildHdfsPath(BlobPath blobPath) {
        final Path path = translateToHdfsPath(blobPath);
        if (!readOnly) {
            try {
                mkdirs(path);
            } catch (FileAlreadyExistsException ok) {
@@ -87,6 +92,7 @@ final class HdfsBlobStore implements BlobStore {
            } catch (IOException ex) {
                throw new ElasticsearchException("failed to create blob container", ex);
            }
        }
        return path;
    }
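The `catch (FileAlreadyExistsException ok)` blocks exist because, as the inline comment notes, the HDFS `mkdirs` is being made to behave like `java.nio.file.Files.createDirectories`, which treats an already-existing directory as success. A small JDK-only illustration of the semantics being emulated:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class CreateDirectoriesDemo {
    public static void main(String[] args) throws IOException {
        Path dir = Files.createTempDirectory("demo").resolve("a").resolve("b");
        Files.createDirectories(dir); // creates the missing chain
        Files.createDirectories(dir); // already exists: silently succeeds
        System.out.println(Files.isDirectory(dir)); // true
    }
}
```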

HdfsRepository.java

@@ -106,7 +106,7 @@ public final class HdfsRepository extends BlobStoreRepository {
            SpecialPermission.check();
            FileContext fileContext = AccessController.doPrivileged((PrivilegedAction<FileContext>)
                () -> createContext(uri, getMetadata().settings()));
            blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize);
            blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize, isReadOnly());
            logger.debug("Using file-system [{}] for URI [{}], path [{}]", fileContext.getDefaultFileSystem(), fileContext.getDefaultFileSystem().getUri(), pathSetting);
        } catch (IOException e) {
            throw new UncheckedIOException(String.format(Locale.ROOT, "Cannot create HDFS repository for uri [%s]", uri), e);
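`isReadOnly()` is inherited from the `BlobStoreRepository` base class, which at the time of this commit appears to derive it from the repository-level `readonly` setting. A condensed sketch of that assumed derivation, as a standalone class rather than the actual base-class source:

```java
import org.elasticsearch.common.settings.Settings;

// Assumed shape of how BlobStoreRepository exposes isReadOnly():
// the repository-level "readonly" setting is read once at construction.
class ReadOnlyFlagSketch {

    private final boolean readOnly;

    ReadOnlyFlagSketch(Settings repositorySettings) {
        this.readOnly = repositorySettings.getAsBoolean("readonly", false);
    }

    public boolean isReadOnly() {
        return readOnly;
    }
}
```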

HdfsBlobStoreContainerTests.java

@@ -19,6 +19,20 @@
package org.elasticsearch.repositories.hdfs;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
import javax.security.auth.Subject;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
@@ -29,22 +43,20 @@ import java.security.PrivilegedAction;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import javax.security.auth.Subject;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes;
import static org.elasticsearch.repositories.ESBlobStoreTestCase.readBlobFully;
@ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class})
public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {

    @Override
    protected BlobStore newBlobStore() throws IOException {
        return new HdfsBlobStore(createTestContext(), "temp", 1024, false);
    }

    private FileContext createTestContext() {
        FileContext fileContext;
        try {
            fileContext = AccessController.doPrivileged((PrivilegedExceptionAction<FileContext>)
@@ -52,7 +64,7 @@ public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
        } catch (PrivilegedActionException e) {
            throw new RuntimeException(e.getCause());
        }
        return new HdfsBlobStore(fileContext, "temp", 1024);
        return fileContext;
    }
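This small refactor is what makes the new test possible: `newBlobStore()` previously created the `FileContext` and constructed the store in one step, so a test could not build two stores, one read-only and one writable, over the same context. Extracting `createTestContext()` lets `testReadOnly` below do exactly that.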
    @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
@@ -98,4 +110,33 @@ public class HdfsBlobStoreContainerTests extends ESBlobStoreContainerTestCase {
            }
        });
    }

    public void testReadOnly() throws Exception {
        FileContext fileContext = createTestContext();
        // Constructor will not create dir if read only
        HdfsBlobStore hdfsBlobStore = new HdfsBlobStore(fileContext, "dir", 1024, true);
        FileContext.Util util = fileContext.util();
        Path root = fileContext.makeQualified(new Path("dir"));
        assertFalse(util.exists(root));
        BlobPath blobPath = BlobPath.cleanPath().add("path");

        // blobContainer() will not create path if read only
        hdfsBlobStore.blobContainer(blobPath);
        Path hdfsPath = root;
        for (String p : blobPath) {
            hdfsPath = new Path(hdfsPath, p);
        }
        assertFalse(util.exists(hdfsPath));

        // if not read only, directory will be created
        hdfsBlobStore = new HdfsBlobStore(fileContext, "dir", 1024, false);
        assertTrue(util.exists(root));
        BlobContainer container = hdfsBlobStore.blobContainer(blobPath);
        assertTrue(util.exists(hdfsPath));

        byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
        writeBlob(container, "foo", new BytesArray(data));
        assertArrayEquals(readBlobFully(container, "foo", data.length), data);
        assertTrue(container.blobExists("foo"));
    }
}
}

ESBlobStoreContainerTestCase.java

@@ -142,7 +142,7 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
        }
    }

    private void writeBlob(final BlobContainer container, final String blobName, final BytesArray bytesArray) throws IOException {
    protected void writeBlob(final BlobContainer container, final String blobName, final BytesArray bytesArray) throws IOException {
        try (InputStream stream = bytesArray.streamInput()) {
            container.writeBlob(blobName, stream, bytesArray.length());
        }

ESBlobStoreTestCase.java

@@ -78,7 +78,7 @@ public abstract class ESBlobStoreTestCase extends ESTestCase {
        return data;
    }

    private static void writeBlob(BlobContainer container, String blobName, BytesArray bytesArray) throws IOException {
    protected static void writeBlob(BlobContainer container, String blobName, BytesArray bytesArray) throws IOException {
        try (InputStream stream = bytesArray.streamInput()) {
            container.writeBlob(blobName, stream, bytesArray.length());
        }
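Both test-base-class `writeBlob` helpers are relaxed from `private` to `protected` so that the new read-only tests, `FsBlobStoreTests.testReadOnly` and `HdfsBlobStoreContainerTests.testReadOnly`, can write their probe blobs through the shared helper instead of duplicating it.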