From fe349ddcf2975fb7afe464574ff6120dd3f88b80 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 5 Feb 2020 16:38:53 +0100 Subject: [PATCH] SOLR-14242: HdfsDirectory#createTempOutput. (#1240) --- solr/CHANGES.txt | 2 ++ .../apache/solr/store/hdfs/HdfsDirectory.java | 15 ++++++++-- .../solr/store/hdfs/HdfsDirectoryTest.java | 29 +++++++++++++++++-- 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index eb874893548..a176c08ee7d 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -170,6 +170,8 @@ New Features * SOLR-12325: Introducing uniqueBlock({!v=type:parent}) aggregation (Anatolii Siuniaev via Mikhail Khludnev) + * SOLR-14242: HdfsDirectory now supports createTempOutput. (Adrien Grand) + Improvements --------------------- * SOLR-14120: Define JavaScript methods 'includes' and 'startsWith' to ensure AdminUI can be displayed when using diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java index cc1133e930b..3c2890e4836 100644 --- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java +++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java @@ -24,6 +24,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; @@ -55,7 +56,10 @@ public class HdfsDirectory extends BaseDirectory { private final FileContext fileContext; private final int bufferSize; - + /** Used to generate temp file names in {@link #createTempOutput}. 
*/ + private final AtomicLong nextTempFileCounter = new AtomicLong(); + public HdfsDirectory(Path hdfsDirPath, Configuration configuration) throws IOException { this(hdfsDirPath, HdfsLockFactory.INSTANCE, configuration, DEFAULT_BUFFER_SIZE); } @@ -123,7 +127,14 @@ public class HdfsDirectory extends BaseDirectory { @Override public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException { - throw new UnsupportedOperationException(); + while (true) { + try { + String name = getTempFileName(prefix, suffix, nextTempFileCounter.getAndIncrement()); + return new HdfsFileWriter(getFileSystem(), new Path(hdfsDirPath, name), name); + } catch (FileAlreadyExistsException faee) { + // Retry with next incremented name + } + } } private String[] getNormalNames(List files) { diff --git a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java index 474983da6fd..baa328e88f0 100644 --- a/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java +++ b/solr/core/src/test/org/apache/solr/store/hdfs/HdfsDirectoryTest.java @@ -53,6 +53,8 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 { private static final int MAX_BUFFER_SIZE = 5000; private static final int MAX_NUMBER_OF_READS = 10000; private static MiniDFSCluster dfsCluster; + private Configuration directoryConf; + private Path directoryPath; private HdfsDirectory directory; private Random random; @@ -74,10 +76,11 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 { public void setUp() throws Exception { super.setUp(); - Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster); - conf.set("dfs.permissions.enabled", "false"); + directoryConf = HdfsTestUtil.getClientConfiguration(dfsCluster); + directoryConf.set("dfs.permissions.enabled", "false"); - directory = new HdfsDirectory(new Path(dfsCluster.getURI().toString() + createTempDir().toFile().getAbsolutePath() + "/hdfs"), 
conf); + directoryPath = new Path(dfsCluster.getURI().toString() + createTempDir().toFile().getAbsolutePath() + "/hdfs"); + directory = new HdfsDirectory(directoryPath, directoryConf); random = random(); } @@ -240,4 +243,24 @@ public class HdfsDirectoryTest extends SolrTestCaseJ4 { () -> directory.createOutput("foo", IOContext.DEFAULT)); } + public void testCreateTempFiles() throws IOException { + String file1; + try (Directory dir = new HdfsDirectory(directoryPath, directoryConf); + IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) { + out.writeByte((byte) 42); + file1 = out.getName(); + } + assertTrue(file1.startsWith("foo_bar")); + assertTrue(file1.endsWith(".tmp")); + // Create the directory again to force the counter to be reset + String file2; + try (Directory dir = new HdfsDirectory(directoryPath, directoryConf); + IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) { + out.writeByte((byte) 42); + file2 = out.getName(); + } + assertTrue(file2.startsWith("foo_bar")); + assertTrue(file2.endsWith(".tmp")); + assertNotEquals(file1, file2); + } }