diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 334ee06df05..7ac4d1f7ed1 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 5.0.0-alpha6 -lucene = 6.1.0 +lucene = 6.2.0 # optional dependencies spatial4j = 0.6 diff --git a/core/src/main/java/org/apache/lucene/analysis/miscellaneous/UniqueTokenFilter.java b/core/src/main/java/org/apache/lucene/analysis/miscellaneous/UniqueTokenFilter.java index b0a6122b548..cc853932efc 100644 --- a/core/src/main/java/org/apache/lucene/analysis/miscellaneous/UniqueTokenFilter.java +++ b/core/src/main/java/org/apache/lucene/analysis/miscellaneous/UniqueTokenFilter.java @@ -19,11 +19,11 @@ package org.apache.lucene.analysis.miscellaneous; +import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.analysis.util.CharArraySet; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index c1c93e38924..0d707e035c7 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -84,7 +84,7 @@ public class Version { public static final int V_5_0_0_alpha5_ID = 5000005; public static final Version V_5_0_0_alpha5 = new Version(V_5_0_0_alpha5_ID, org.apache.lucene.util.Version.LUCENE_6_1_0); public static final int V_5_0_0_alpha6_ID = 5000006; - public static final Version V_5_0_0_alpha6 = new Version(V_5_0_0_alpha6_ID, org.apache.lucene.util.Version.LUCENE_6_1_0); + public static final Version V_5_0_0_alpha6 = new Version(V_5_0_0_alpha6_ID, org.apache.lucene.util.Version.LUCENE_6_2_0); public static final Version CURRENT = V_5_0_0_alpha6; static { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 39e67ce6458..d555e199d08 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -88,7 +88,7 @@ import java.util.Objects; public class Lucene { public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; - public static final String LATEST_CODEC = "Lucene60"; + public static final String LATEST_CODEC = "Lucene62"; static { Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java index 2ce5e489027..d2158f707ca 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -20,8 +20,9 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.LegacyNumericTokenStream; +import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; import org.apache.lucene.analysis.br.BrazilianAnalyzer; @@ -55,7 +56,6 @@ import 
org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tr.TurkishAnalyzer; -import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.util.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.FileSystemUtils; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java index 5a1754a02fe..4b185c450d5 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.analysis; +import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; -import org.apache.lucene.analysis.util.CharArraySet; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -43,4 +43,4 @@ public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene60Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene60Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene62Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene62Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index e16e66904c9..ec4636e396c 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene60.Lucene60Codec; +import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.CompletionFieldMapper; @@ -39,7 +39,7 @@ import org.elasticsearch.index.mapper.MapperService; * configured for a specific field the default postings format is used. 
*/ // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version -public class PerFieldMappingPostingFormatCodec extends Lucene60Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene62Codec { private final ESLogger logger; private final MapperService mapperService; diff --git a/core/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java b/core/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java index 0d53163f15e..7b79f785ff3 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java +++ b/core/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java @@ -85,7 +85,7 @@ final class LocalShardSnapshot implements Closeable { } @Override - public void renameFile(String source, String dest) throws IOException { + public void rename(String source, String dest) throws IOException { throw new UnsupportedOperationException("this directory is read-only"); } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 7bb81a0109b..a8b7fafb980 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -128,25 +128,14 @@ public final class SimilarityService extends AbstractIndexComponent { static class PerFieldSimilarity extends PerFieldSimilarityWrapper { private final Similarity defaultSimilarity; - private final Similarity baseSimilarity; private final MapperService mapperService; PerFieldSimilarity(Similarity defaultSimilarity, Similarity baseSimilarity, MapperService mapperService) { + super(baseSimilarity); this.defaultSimilarity = defaultSimilarity; - this.baseSimilarity = baseSimilarity; this.mapperService = mapperService; } - @Override - public float coord(int overlap, int maxOverlap) { - return baseSimilarity.coord(overlap, maxOverlap); - } - - @Override - public float queryNorm(float valueForNormalization) { - return baseSimilarity.queryNorm(valueForNormalization); - } - @Override public Similarity get(String name) { MappedFieldType fieldType = mapperService.fullName(name); diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index e714d8db8b6..ff3d89d9ff9 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -245,7 +245,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref /** - * Renames all the given files form the key of the map to the + * Renames all the given files from the key of the map to the * value of the map. All successfully renamed files are removed from the map in-place. */ public void renameTempFilesSafe(Map tempFileMap) throws IOException { @@ -282,10 +282,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref logger.debug("failed to delete file [{}]", ex, origFile); } // now, rename the files... 
and fail it it won't work - this.renameFile(tempFile, origFile); + directory.rename(tempFile, origFile); final String remove = tempFileMap.remove(tempFile); assert remove != null; } + directory.syncMetaData(); } finally { metadataLock.writeLock().unlock(); } @@ -297,11 +298,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref return statsCache.getOrRefresh(); } - public void renameFile(String from, String to) throws IOException { - ensureOpen(); - directory.renameFile(from, to); - } - /** * Increments the refCount of this Store instance. RefCounts are used to determine when a * Store can be closed safely, i.e. as soon as there are no more references. Be sure to always call a diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index b6e1c6e18cb..7afcb8a558a 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1177,7 +1177,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } @Override - public void prepareCommit() throws IOException { + public long prepareCommit() throws IOException { try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); if (currentCommittingGeneration != NOT_SET_GENERATION) { @@ -1200,10 +1200,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC IOUtils.closeWhileHandlingException(this); // tragic event throw e; } + return 0L; } @Override - public void commit() throws IOException { + public long commit() throws IOException { try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); if (currentCommittingGeneration == NOT_SET_GENERATION) { @@ -1216,6 +1217,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC currentCommittingGeneration = NOT_SET_GENERATION; trimUnreferencedReaders(); } + return 0; } void trimUnreferencedReaders() { diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index ca954619927..23ef9bdcd3f 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -19,6 +19,7 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; import org.apache.lucene.analysis.br.BrazilianAnalyzer; @@ -58,7 +59,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.tr.TurkishAnalyzer; -import org.apache.lucene.analysis.util.CharArraySet; import org.elasticsearch.Version; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.analysis.PatternAnalyzer; diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java index 6b69658b341..a31f60fc5bd 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java @@ -18,6 +18,9 @@ */ 
package org.elasticsearch.indices.analysis; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicStemFilter; @@ -27,9 +30,7 @@ import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; -import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.StopAnalyzer; -import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.UpperCaseFilter; import org.apache.lucene.analysis.cz.CzechStemFilter; import org.apache.lucene.analysis.de.GermanNormalizationFilter; @@ -60,7 +61,6 @@ import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.tr.ApostropheFilter; -import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.analysis.util.ElisionFilter; import org.elasticsearch.Version; import org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory; diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 856b3ff4264..1b42310a13a 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RateLimiter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -1636,6 +1637,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp } else { stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreRateLimitingTimeInNanos::inc); } + + // TODO: why does the target file sometimes already exist? 
Simon says: I think, this can happen if you fail a shard and + // it's not cleaned up yet, the restore process tries to reuse files + IOUtils.deleteFilesIgnoringExceptions(store.directory(), fileInfo.physicalName()); + try (final IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) { final byte[] buffer = new byte[BUFFER_SIZE]; int length; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java index 5fcbf9db57b..d3008e999e8 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java @@ -41,6 +41,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.Version; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.mapper.CompletionFieldMapper2x; @@ -127,7 +128,7 @@ public class Completion090PostingsFormat extends PostingsFormat { boolean success = false; try { output = state.directory.createOutput(suggestFSTFile, state.context); - CodecUtil.writeHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT); + CodecUtil.writeIndexHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); /* * we write the delegate postings format name so we can load it * without getting an instance in the ctor @@ -165,7 +166,13 @@ public class Completion090PostingsFormat extends PostingsFormat { public CompletionFieldsProducer(SegmentReadState state) throws IOException { String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); IndexInput input = state.directory.openInput(suggestFSTFile, state.context); - version = CodecUtil.checkHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT); + if (state.segmentInfo.getVersion().onOrAfter(Version.LUCENE_6_2_0)) { + // Lucene 6.2.0+ requires all index files to use index header, but prior to that we used an ordinary codec header: + version = CodecUtil.checkIndexHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT, + state.segmentInfo.getId(), state.segmentSuffix); + } else { + version = CodecUtil.checkHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT); + } FieldsProducer delegateProducer = null; boolean success = false; try { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 2f83d56543d..97ccfb31bf2 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. 
-grant codeBase "${codebase.lucene-core-6.1.0.jar}" { +grant codeBase "${codebase.lucene-core-6.2.0.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.1.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-6.1.0.jar}" { +grant codeBase "${codebase.lucene-misc-6.2.0.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 86db85d2cdb..43f6b62c3c3 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.1.0.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.2.0.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 1bba4cac3dd..05e30d7e2d7 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -20,14 +20,14 @@ package org.elasticsearch.action.termvectors; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper; import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java index b4042164e23..4073bbdbbc9 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.analysis; -import org.apache.lucene.analysis.util.CharArraySet; +import org.apache.lucene.analysis.CharArraySet; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java index 
ac412207021..c5e854879e9 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/FingerprintAnalyzerTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.analysis; */ import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.util.CharArraySet; +import org.apache.lucene.analysis.CharArraySet; import org.elasticsearch.test.ESTokenStreamTestCase; public class FingerprintAnalyzerTests extends ESTokenStreamTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java index 186f6ac1cb7..f33ddc88cf4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactoryTests.java @@ -19,11 +19,12 @@ package org.elasticsearch.index.analysis; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; + +import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.test.ESTokenStreamTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index 191b7ffcdf5..07b00f30e20 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -19,14 +19,14 @@ package org.elasticsearch.index.analysis; +import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; import org.apache.lucene.util.Version; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; diff --git a/core/src/test/java/org/elasticsearch/index/analysis/filter1/MyFilterTokenFilterFactory.java b/core/src/test/java/org/elasticsearch/index/analysis/filter1/MyFilterTokenFilterFactory.java index c7cd3cd625e..1c9a4798139 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/filter1/MyFilterTokenFilterFactory.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/filter1/MyFilterTokenFilterFactory.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.index.analysis.filter1; +import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.StopAnalyzer; -import org.apache.lucene.analysis.core.StopFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -36,4 +36,4 @@ public class MyFilterTokenFilterFactory extends AbstractTokenFilterFactory { public 
TokenStream create(TokenStream tokenStream) { return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 80f1cbe46d0..cf706f33cc2 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -25,7 +25,7 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene53.Lucene53Codec; import org.apache.lucene.codecs.lucene54.Lucene54Codec; -import org.apache.lucene.codecs.lucene60.Lucene60Codec; +import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -55,7 +55,7 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene60Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene62Codec.class)); assertThat(codecService.codec("Lucene54"), instanceOf(Lucene54Codec.class)); assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class)); assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class)); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index ba1894193b1..cb65e5374c8 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1017,12 +1017,6 @@ public class InternalEngineTests extends ESTestCase { engine.index(new Engine.Index(newUid("2"), doc)); EngineConfig config = engine.config(); engine.close(); - final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class); - if (directory != null) { - // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents - // this so we have to disable the check explicitly - directory.setPreventDoubleWrite(false); - } engine = new InternalEngine(copy(config, EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); engine.recoverFromTranslog(); assertNull("Sync ID must be gone since we have a document to replay", engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID)); @@ -1758,7 +1752,6 @@ public class InternalEngineTests extends ESTestCase { if (directory != null) { // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents // this so we have to disable the check explicitly - directory.setPreventDoubleWrite(false); boolean started = false; final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { @@ -1804,12 +1797,6 @@ public class InternalEngineTests extends ESTestCase { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); assertThat(topDocs.totalHits, equalTo(numDocs)); } - final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), 
MockDirectoryWrapper.class); - if (directory != null) { - // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents - // this so we have to disable the check explicitly - directory.setPreventDoubleWrite(false); - } engine.close(); engine = new InternalEngine(engine.config()); @@ -1928,12 +1915,6 @@ public class InternalEngineTests extends ESTestCase { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); assertThat(topDocs.totalHits, equalTo(numDocs)); } - final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class); - if (directory != null) { - // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents - // this so we have to disable the check explicitly - directory.setPreventDoubleWrite(false); - } TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer(); parser.mappingUpdate = dynamicUpdate(); @@ -2051,12 +2032,6 @@ public class InternalEngineTests extends ESTestCase { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); assertThat(topDocs.totalHits, equalTo(numDocs)); } - final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class); - if (directory != null) { - // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents - // this so we have to disable the check explicitly - directory.setPreventDoubleWrite(false); - } Translog.TranslogGeneration generation = engine.getTranslog().getGeneration(); engine.close(); diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 25199caff91..cea4610685a 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -354,49 +354,6 @@ public class StoreTests extends ESTestCase { IOUtils.close(store); } - public void testRenameFile() throws IOException { - final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random(), false); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); - { - IndexOutput output = store.directory().createOutput("foo.bar", IOContext.DEFAULT); - int iters = scaledRandomIntBetween(10, 100); - for (int i = 0; i < iters; i++) { - BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); - output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); - } - CodecUtil.writeFooter(output); - output.close(); - } - store.renameFile("foo.bar", "bar.foo"); - assertThat(numNonExtraFiles(store), is(1)); - final long lastChecksum; - try (IndexInput input = store.directory().openInput("bar.foo", IOContext.DEFAULT)) { - lastChecksum = CodecUtil.checksumEntireFile(input); - } - - try { - store.directory().openInput("foo.bar", IOContext.DEFAULT); - fail("file was renamed"); - } catch (FileNotFoundException | NoSuchFileException ex) { - // expected - } - { - IndexOutput output = store.directory().createOutput("foo.bar", IOContext.DEFAULT); - int iters = scaledRandomIntBetween(10, 100); - for (int i = 0; i < iters; i++) { - BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); - 
output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); - } - CodecUtil.writeFooter(output); - output.close(); - } - store.renameFile("foo.bar", "bar.foo"); - assertThat(numNonExtraFiles(store), is(1)); - assertDeleteContent(store, directoryService); - IOUtils.close(store); - } - public void testCheckIntegrity() throws IOException { Directory dir = newDirectory(); long luceneFileLength = 0; @@ -519,9 +476,6 @@ public class StoreTests extends ESTestCase { public LuceneManagedDirectoryService(Random random, boolean preventDoubleWrite) { super(new ShardId(INDEX_SETTINGS.getIndex(), 1), INDEX_SETTINGS); dir = StoreTests.newDirectory(random); - if (dir instanceof MockDirectoryWrapper) { - ((MockDirectoryWrapper) dir).setPreventDoubleWrite(preventDoubleWrite); - } this.random = random; } @@ -963,11 +917,8 @@ public class StoreTests extends ESTestCase { } writer.commit(); writer.close(); - MockDirectoryWrapper leaf = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class); - if (leaf != null) { - leaf.setPreventDoubleWrite(false); // I do this on purpose - } SegmentInfos segmentCommitInfos = store.readLastCommittedSegmentsInfo(); + store.directory().deleteFile(segmentCommitInfos.getSegmentsFileName()); try (IndexOutput out = store.directory().createOutput(segmentCommitInfos.getSegmentsFileName(), IOContext.DEFAULT)) { // empty file } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java index 2b19b01f2c4..61b5d2eb319 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzer.java @@ -19,7 +19,7 @@ package org.elasticsearch.indices.analysis; -import org.apache.lucene.analysis.util.StopwordAnalyzerBase; +import org.apache.lucene.analysis.StopwordAnalyzerBase; public class DummyAnalyzer extends StopwordAnalyzerBase { diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java index 0640bf0d6f0..d66fd8596bb 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.suggest.phrase; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper; import org.apache.lucene.analysis.reverse.ReverseStringFilter; diff --git a/distribution/licenses/lucene-analyzers-common-6.1.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.1.0.jar.sha1 deleted file mode 100644 index 382bf79e91f..00000000000 --- a/distribution/licenses/lucene-analyzers-common-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -20c3c3a717a225df0b462216e70a57922a8edd28 \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-6.2.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-6.2.0.jar.sha1 new file mode 100644 index 00000000000..57aec3f4ac2 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-6.2.0.jar.sha1 @@ 
-0,0 +1 @@ +d254d52dd394b5079129f3d5f3bf4f2d44a5936e \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.1.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.1.0.jar.sha1 deleted file mode 100644 index f13099389d2..00000000000 --- a/distribution/licenses/lucene-backward-codecs-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cbbba4d2d0c1469e0cc3358489b72922ba4963bf \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-6.2.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-6.2.0.jar.sha1 new file mode 100644 index 00000000000..04aefc62f61 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-6.2.0.jar.sha1 @@ -0,0 +1 @@ +b625bb21456b3c0d1e5e431bced125cb060c1abd \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.1.0.jar.sha1 b/distribution/licenses/lucene-core-6.1.0.jar.sha1 deleted file mode 100644 index 5c0b798a771..00000000000 --- a/distribution/licenses/lucene-core-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -763361bd8cb48161ae28d67a7ca20c4abf194ecb \ No newline at end of file diff --git a/distribution/licenses/lucene-core-6.2.0.jar.sha1 b/distribution/licenses/lucene-core-6.2.0.jar.sha1 new file mode 100644 index 00000000000..2d74124e624 --- /dev/null +++ b/distribution/licenses/lucene-core-6.2.0.jar.sha1 @@ -0,0 +1 @@ +849ee62525a294416802be2cacc66c80352f6f13 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.1.0.jar.sha1 b/distribution/licenses/lucene-grouping-6.1.0.jar.sha1 deleted file mode 100644 index 74a68a00f05..00000000000 --- a/distribution/licenses/lucene-grouping-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c40e8a1904557f67e3d14462d64865cf4a838123 \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-6.2.0.jar.sha1 b/distribution/licenses/lucene-grouping-6.2.0.jar.sha1 new file mode 100644 index 00000000000..6ba525a038f --- /dev/null +++ b/distribution/licenses/lucene-grouping-6.2.0.jar.sha1 @@ -0,0 +1 @@ +9527fedfd5acc624b2bb3f862bd99fb8f470b053 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.1.0.jar.sha1 b/distribution/licenses/lucene-highlighter-6.1.0.jar.sha1 deleted file mode 100644 index b26247fef51..00000000000 --- a/distribution/licenses/lucene-highlighter-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6ab7c27256d3cd51022fb7130eb3e92391f24cdc \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-6.2.0.jar.sha1 b/distribution/licenses/lucene-highlighter-6.2.0.jar.sha1 new file mode 100644 index 00000000000..c258e3fb850 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-6.2.0.jar.sha1 @@ -0,0 +1 @@ +7ca342372a3f45e32bbd21cecaa757e38eccb8a5 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.1.0.jar.sha1 b/distribution/licenses/lucene-join-6.1.0.jar.sha1 deleted file mode 100644 index 2198e453138..00000000000 --- a/distribution/licenses/lucene-join-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -63411cef454a282c9ead56b25e0ca87daecbcf77 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-6.2.0.jar.sha1 b/distribution/licenses/lucene-join-6.2.0.jar.sha1 new file mode 100644 index 00000000000..01989e96a58 --- /dev/null +++ b/distribution/licenses/lucene-join-6.2.0.jar.sha1 @@ -0,0 +1 @@ +da0b8de98511abd4fe9c7d48a353d17854c5ed46 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.1.0.jar.sha1 b/distribution/licenses/lucene-memory-6.1.0.jar.sha1 deleted file mode 100644 
index 7937bebc519..00000000000 --- a/distribution/licenses/lucene-memory-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e6702402615fcca549457842a08d21c35375a632 \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-6.2.0.jar.sha1 b/distribution/licenses/lucene-memory-6.2.0.jar.sha1 new file mode 100644 index 00000000000..b8a4a87efe2 --- /dev/null +++ b/distribution/licenses/lucene-memory-6.2.0.jar.sha1 @@ -0,0 +1 @@ +bc9e075b1ee051c8e5246c237c38d8e71dab8a66 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.1.0.jar.sha1 b/distribution/licenses/lucene-misc-6.1.0.jar.sha1 deleted file mode 100644 index 87e8d94d463..00000000000 --- a/distribution/licenses/lucene-misc-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a4884f1eaa7ac8af4abb51b45d0175ef7a3e03f \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-6.2.0.jar.sha1 b/distribution/licenses/lucene-misc-6.2.0.jar.sha1 new file mode 100644 index 00000000000..f4e081865ad --- /dev/null +++ b/distribution/licenses/lucene-misc-6.2.0.jar.sha1 @@ -0,0 +1 @@ +94ddde6312566a4da4a50a88e453b6c82c759b41 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.1.0.jar.sha1 b/distribution/licenses/lucene-queries-6.1.0.jar.sha1 deleted file mode 100644 index 687a6fa1ce6..00000000000 --- a/distribution/licenses/lucene-queries-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2239ddd94a44d627b667221bdf129681af85ba51 \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-6.2.0.jar.sha1 b/distribution/licenses/lucene-queries-6.2.0.jar.sha1 new file mode 100644 index 00000000000..f7270a23afe --- /dev/null +++ b/distribution/licenses/lucene-queries-6.2.0.jar.sha1 @@ -0,0 +1 @@ +dce47238f78e3e97d91dc6fefa9f46f07866bc2b \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.1.0.jar.sha1 b/distribution/licenses/lucene-queryparser-6.1.0.jar.sha1 deleted file mode 100644 index 69df3d6e695..00000000000 --- a/distribution/licenses/lucene-queryparser-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6b9e5e3dfc5b3c2689dcdc63d115c06be06c3837 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-6.2.0.jar.sha1 b/distribution/licenses/lucene-queryparser-6.2.0.jar.sha1 new file mode 100644 index 00000000000..8e95aa600ec --- /dev/null +++ b/distribution/licenses/lucene-queryparser-6.2.0.jar.sha1 @@ -0,0 +1 @@ +17ef728ac15e668bfa1105321611548424637645 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.1.0.jar.sha1 b/distribution/licenses/lucene-sandbox-6.1.0.jar.sha1 deleted file mode 100644 index 5790e6e19b1..00000000000 --- a/distribution/licenses/lucene-sandbox-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -53d35813f366d70ae0aef99d4f567d007290bdd2 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-6.2.0.jar.sha1 b/distribution/licenses/lucene-sandbox-6.2.0.jar.sha1 new file mode 100644 index 00000000000..1f34be3033d --- /dev/null +++ b/distribution/licenses/lucene-sandbox-6.2.0.jar.sha1 @@ -0,0 +1 @@ +520183f7b9aba77a26e224760c420a3844b0631a \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.1.0.jar.sha1 b/distribution/licenses/lucene-spatial-6.1.0.jar.sha1 deleted file mode 100644 index 3031c7fbd96..00000000000 --- a/distribution/licenses/lucene-spatial-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -03ce415700267e5d329f2d01e599d13291aaef97 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-6.2.0.jar.sha1 
b/distribution/licenses/lucene-spatial-6.2.0.jar.sha1 new file mode 100644 index 00000000000..22e81792e40 --- /dev/null +++ b/distribution/licenses/lucene-spatial-6.2.0.jar.sha1 @@ -0,0 +1 @@ +8dba929b66927b936fbc76103b109ad6c824daee \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.1.0.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.1.0.jar.sha1 deleted file mode 100644 index 5d5c9fc40d2..00000000000 --- a/distribution/licenses/lucene-spatial-extras-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eed865fccebb3c0a1ec2bebba1eddaaf9295c385 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-extras-6.2.0.jar.sha1 b/distribution/licenses/lucene-spatial-extras-6.2.0.jar.sha1 new file mode 100644 index 00000000000..d5e8f379d78 --- /dev/null +++ b/distribution/licenses/lucene-spatial-extras-6.2.0.jar.sha1 @@ -0,0 +1 @@ +3b5a6ef5cd90c0218a72e9e2f7e60104be2447da \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.1.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.1.0.jar.sha1 deleted file mode 100644 index 4735753ddc0..00000000000 --- a/distribution/licenses/lucene-spatial3d-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -365a48f8d019aeeea34de1e80b03344fe3d4401b \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-6.2.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-6.2.0.jar.sha1 new file mode 100644 index 00000000000..d0ce5275a26 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-6.2.0.jar.sha1 @@ -0,0 +1 @@ +fcdb0567725722c5145149d1502848b6a96ec18d \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.1.0.jar.sha1 b/distribution/licenses/lucene-suggest-6.1.0.jar.sha1 deleted file mode 100644 index 548cba1d048..00000000000 --- a/distribution/licenses/lucene-suggest-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e39f0e09ff1e3d9874a3b072294b80cf6567e431 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-6.2.0.jar.sha1 b/distribution/licenses/lucene-suggest-6.2.0.jar.sha1 new file mode 100644 index 00000000000..39392ad1158 --- /dev/null +++ b/distribution/licenses/lucene-suggest-6.2.0.jar.sha1 @@ -0,0 +1 @@ +3d9d526c51f483d27f425c75d7e56bc0849242d6 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.1.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.1.0.jar.sha1 deleted file mode 100644 index ac5b533d138..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e722f2e6b4838ede6bf4f1c088fe7b261a7b7571 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.2.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.2.0.jar.sha1 new file mode 100644 index 00000000000..205aaae6e66 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.2.0.jar.sha1 @@ -0,0 +1 @@ +99764b20aba5443f8a181f7015a806443c589844 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0.jar.sha1 deleted file mode 100644 index 96fef36cf4e..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f580ed2ea0dca073199daa1a190ac142b3426030 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.2.0.jar.sha1 
b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.2.0.jar.sha1 new file mode 100644 index 00000000000..2a734f79a3f --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.2.0.jar.sha1 @@ -0,0 +1 @@ +68de5f298090b92aa9a803eb4f5aed0c9104e685 \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index eac3ceebc16..14fa5922c1d 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -85,8 +85,8 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { String resourcePath = entry.getValue(); breakers[code] = parseRules(resourcePath, env); } - // cjkAsWords is not configurable yet. - ICUTokenizerConfig config = new DefaultICUTokenizerConfig(true) { + // cjkAsWords nor myanmarAsWords are not configurable yet. + ICUTokenizerConfig config = new DefaultICUTokenizerConfig(true, true) { @Override public BreakIterator getBreakIterator(int script) { if (breakers[script] != null) { diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0.jar.sha1 deleted file mode 100644 index bb0e327d275..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc436a2a2324e95fb27678d85ca6fd5018a5cec6 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.2.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.2.0.jar.sha1 new file mode 100644 index 00000000000..749cb8ecde8 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.2.0.jar.sha1 @@ -0,0 +1 @@ +17ee76df332c0342a172790472b777086487a299 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/JapaneseStopTokenFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/JapaneseStopTokenFilterFactory.java index 7b760bc4f63..d10fe4089f2 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/JapaneseStopTokenFilterFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/JapaneseStopTokenFilterFactory.java @@ -20,10 +20,10 @@ package org.elasticsearch.index.analysis; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.core.StopFilter; import org.apache.lucene.analysis.ja.JapaneseAnalyzer; -import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java index 21d9b804055..0c6ab2d3ea6 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiAnalyzerProvider.java @@ 
-19,10 +19,10 @@ package org.elasticsearch.index.analysis; +import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ja.JapaneseAnalyzer; import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.util.CharArraySet; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0.jar.sha1 deleted file mode 100644 index 94556c24056..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1debc0cb187cde2bb2bcb3fc8a468f820d25b440 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.2.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.2.0.jar.sha1 new file mode 100644 index 00000000000..359173e0084 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.2.0.jar.sha1 @@ -0,0 +1 @@ +8d2a6b8679563d9f044eb1cee580282b20d8e149 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0.jar.sha1 deleted file mode 100644 index 5abfc22ee1a..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e898fbd5da085f7b041feb3537a34137d2db560 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.2.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.2.0.jar.sha1 new file mode 100644 index 00000000000..66e339bfa2f --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.2.0.jar.sha1 @@ -0,0 +1 @@ +ba3fd99d1cf47d31b82817accdb199fc7a8d838d \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0.jar.sha1 deleted file mode 100644 index ac449309ee2..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -51a52b0cd4ba5e686201917e65393feb56afd3a7 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.2.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.2.0.jar.sha1 new file mode 100644 index 00000000000..5cfb071f3a3 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.2.0.jar.sha1 @@ -0,0 +1 @@ +09d2a759a765f73e2e7becbc560411469c464cfa \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java index 35ef0868eba..1b4fcd2922a 100644 --- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java @@ -228,6 +228,8 @@ public class AnalysisFactoryTestCase extends ESTestCase { .put("fingerprint", Void.class) // for tee-sinks .put("daterecognizer", Void.class) + .put("minhash", Void.class) + .immutableMap(); static final Map> KNOWN_CHARFILTERS = new MapBuilder>() diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 1b99d2f32cb..1e84ca0fe71 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -172,7 +172,6 @@ public class MockFSDirectoryService extends FsDirectoryService { w.setRandomIOExceptionRateOnOpen(randomIOExceptionRateOnOpen); w.setThrottling(throttle); w.setCheckIndexOnClose(false); // we do this on the index level - w.setPreventDoubleWrite(preventDoubleWrite); // TODO: make this test robust to virus scanner w.setAssertNoDeleteOpenFile(false); w.setUseSlowOpenClosers(false);
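
Note for reviewers: several hunks above track the same Lucene 6.2 Directory API change — Store drops its renameFile helper and calls Directory.rename(...) followed by Directory.syncMetaData(), and LocalShardSnapshot's read-only wrapper now overrides rename(...) instead of renameFile(...). A minimal standalone sketch of that pattern is below; it is illustrative only, and the class name, scratch directory, file names, and payload are hypothetical rather than anything from this patch.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

public class RenameSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical scratch directory; any FSDirectory behaves the same way.
        Path path = Files.createTempDirectory("rename-sketch");
        try (Directory dir = FSDirectory.open(path)) {
            // Write a temporary file, roughly what Store.renameTempFilesSafe handles during recovery.
            try (IndexOutput out = dir.createOutput("foo.tmp", IOContext.DEFAULT)) {
                out.writeString("example payload");
            }
            // Lucene 6.2: Directory.rename(...) replaces the removed renameFile(...).
            dir.rename("foo.tmp", "foo.dat");
            // Make the rename durable; the Store change above adds the same syncMetaData() call after its rename loop.
            dir.syncMetaData();
        }
    }
}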