SOLR-14238: Fix HdfsDirectory to no longer overwrite existing files. (#1237)

Adrien Grand 2020-02-04 19:35:16 +01:00 committed by GitHub
parent bb90569f1d
commit 2d8428ec2e
3 changed files with 18 additions and 3 deletions
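
The change makes HdfsDirectory honor Lucene's Directory contract, under which createOutput must refuse to overwrite an existing file rather than silently truncate it. A minimal caller-side sketch of the behavior the patch enforces (the Directory instance is assumed to be an already-opened HdfsDirectory; the file name and payload are made up, not taken from the patch):

import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

public class CreateOutputSketch {
  // dir is assumed to be an open HdfsDirectory; "foo" is an arbitrary file name.
  static void writeOnce(Directory dir) throws IOException {
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      out.writeByte((byte) 42);
    }
    try {
      // With this fix, a second create of the same name fails instead of
      // truncating the data written above.
      dir.createOutput("foo", IOContext.DEFAULT).close();
    } catch (FileAlreadyExistsException expected) {
      // A caller that genuinely wants to replace the file must delete it first.
      dir.deleteFile("foo");
    }
  }
}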

HdfsDirectory.java

@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -111,7 +112,13 @@ public class HdfsDirectory extends BaseDirectory {
 
   @Override
   public IndexOutput createOutput(String name, IOContext context) throws IOException {
-    return new HdfsFileWriter(getFileSystem(), new Path(hdfsDirPath, name), name);
+    try {
+      return new HdfsFileWriter(getFileSystem(), new Path(hdfsDirPath, name), name);
+    } catch (FileAlreadyExistsException e) {
+      java.nio.file.FileAlreadyExistsException ex = new java.nio.file.FileAlreadyExistsException(e.getMessage());
+      ex.initCause(e);
+      throw ex;
+    }
   }
 
   @Override
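
Hadoop's org.apache.hadoop.fs.FileAlreadyExistsException and java.nio.file.FileAlreadyExistsException share a simple name but are unrelated types; the Lucene Directory API documents the java.nio.file one, which is why the catch block rewraps the Hadoop exception under its fully qualified name and keeps the original as the cause. A caller-side sketch of what that preserves (the method and logging around it are illustrative only, not from the patch):

import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

public class CauseSketch {
  static void createOrReport(Directory dir, String name) throws IOException {
    try {
      dir.createOutput(name, IOContext.DEFAULT).close();
    } catch (java.nio.file.FileAlreadyExistsException e) {
      // initCause() in HdfsDirectory keeps the HDFS-side exception available here.
      Throwable hdfsCause = e.getCause();
      System.err.println("File " + name + " already exists on HDFS: " + hdfsCause);
      throw e;
    }
  }
}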

HdfsFileWriter.java

@@ -44,8 +44,7 @@ public class HdfsFileWriter extends OutputStreamIndexOutput {
     Configuration conf = fileSystem.getConf();
     FsServerDefaults fsDefaults = fileSystem.getServerDefaults(path);
     short replication = fileSystem.getDefaultReplication(path);
-    EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE,
-        CreateFlag.OVERWRITE);
+    EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE);
     if (Boolean.getBoolean(HDFS_SYNC_BLOCK)) {
       flags.add(CreateFlag.SYNC_BLOCK);
     }
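
With CreateFlag.OVERWRITE removed, the underlying HDFS create call only succeeds for a path that does not exist yet; an existing path now fails with org.apache.hadoop.fs.FileAlreadyExistsException, which the HdfsDirectory change above translates. A sketch of the two flag sets in isolation (the surrounding stream setup in HdfsFileWriter, such as buffer size, replication, and block size, is omitted here):

import java.util.EnumSet;

import org.apache.hadoop.fs.CreateFlag;

public class CreateFlagSketch {
  // Old behavior: CREATE | OVERWRITE silently truncates an existing file.
  static final EnumSet<CreateFlag> OLD_FLAGS =
      EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE);

  // New behavior: CREATE alone makes HDFS reject an existing path with
  // org.apache.hadoop.fs.FileAlreadyExistsException.
  static final EnumSet<CreateFlag> NEW_FLAGS = EnumSet.of(CreateFlag.CREATE);
}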

HdfsDirectoryTest.java

@@ -17,6 +17,7 @@
 package org.apache.solr.store.hdfs;
 
 import java.io.IOException;
+import java.nio.file.FileAlreadyExistsException;
 import java.util.HashSet;
 import java.util.Random;
 import java.util.Set;
@@ -231,4 +232,12 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 {
     return Long.toString(Math.abs(random.nextLong()));
   }
 
+  public void testCantOverrideFiles() throws IOException {
+    try (IndexOutput out = directory.createOutput("foo", IOContext.DEFAULT)) {
+      out.writeByte((byte) 42);
+    }
+    expectThrows(FileAlreadyExistsException.class,
+        () -> directory.createOutput("foo", IOContext.DEFAULT));
+  }
+
 }
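
expectThrows comes from LuceneTestCase (which SolrTestCaseJ4 extends) and returns the caught exception, so the test could additionally verify that the HDFS exception survives as the cause. A hypothetical extension of the test body, not part of the patch:

// Inside testCantOverrideFiles(), replacing the expectThrows call above:
FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class,
    () -> directory.createOutput("foo", IOContext.DEFAULT));
// The cause set via initCause() in HdfsDirectory.createOutput is the Hadoop exception.
assertTrue(e.getCause() instanceof org.apache.hadoop.fs.FileAlreadyExistsException);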