From 314f0749aeb26c1937be56f51eb9ba7101168fd6 Mon Sep 17 00:00:00 2001 From: David Pilato Date: Sat, 26 Sep 2015 11:13:50 +0200 Subject: [PATCH 01/35] discovery-ec2 plugin should check `discovery.type` As done in #13809 and in Azure, we should check that `discovery.type` is set to `ec2` before starting services. Closes #13581. --- .../elasticsearch/cloud/aws/Ec2Module.java | 17 +++++++ .../discovery/ec2/Ec2Discovery.java | 4 +- .../discovery/ec2/Ec2DiscoveryPlugin.java | 16 ++++++- .../ec2/Ec2DiscoverySettingsTests.java | 44 +++++++++++++++++++ 4 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoverySettingsTests.java diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java index 4029e1b6326..09a0116fc6d 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java @@ -20,6 +20,9 @@ package org.elasticsearch.cloud.aws; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.ec2.Ec2Discovery; public class Ec2Module extends AbstractModule { @@ -27,4 +30,18 @@ public class Ec2Module extends AbstractModule { protected void configure() { bind(AwsEc2Service.class).to(AwsEc2ServiceImpl.class).asEagerSingleton(); } + + /** + * Check if discovery is meant to start + * @return true if we can start discovery features + */ + public static boolean isEc2DiscoveryActive(Settings settings, ESLogger logger) { + // User set discovery.type: ec2 + if (!Ec2Discovery.EC2.equalsIgnoreCase(settings.get("discovery.type"))) { + logger.trace("discovery.type not set to {}", Ec2Discovery.EC2); + return false; + } + + return true; + } } diff 
--git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java index b599541604c..e94b7618d12 100755 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2Discovery.java @@ -21,8 +21,6 @@ package org.elasticsearch.discovery.ec2; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.settings.ClusterDynamicSettings; -import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoverySettings; @@ -38,6 +36,8 @@ import org.elasticsearch.transport.TransportService; */ public class Ec2Discovery extends ZenDiscovery { + public static final String EC2 = "ec2"; + @Inject public Ec2Discovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService, diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index 36a854489c4..4cf876677a2 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -23,6 +23,9 @@ import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; import org.elasticsearch.cloud.aws.Ec2Module; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.logging.ESLogger; +import 
org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.ec2.Ec2Discovery; @@ -55,6 +58,13 @@ public class Ec2DiscoveryPlugin extends Plugin { }); } + private final Settings settings; + protected final ESLogger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); + + public Ec2DiscoveryPlugin(Settings settings) { + this.settings = settings; + } + @Override public String name() { return "discovery-ec2"; @@ -80,7 +90,9 @@ public class Ec2DiscoveryPlugin extends Plugin { } public void onModule(DiscoveryModule discoveryModule) { - discoveryModule.addDiscoveryType("ec2", Ec2Discovery.class); - discoveryModule.addUnicastHostProvider(AwsEc2UnicastHostsProvider.class); + if (Ec2Module.isEc2DiscoveryActive(settings, logger)) { + discoveryModule.addDiscoveryType("ec2", Ec2Discovery.class); + discoveryModule.addUnicastHostProvider(AwsEc2UnicastHostsProvider.class); + } } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoverySettingsTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoverySettingsTests.java new file mode 100644 index 00000000000..f0dfe960c80 --- /dev/null +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoverySettingsTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.ec2; + +import org.elasticsearch.cloud.aws.Ec2Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class Ec2DiscoverySettingsTests extends ESTestCase { + + public void testDiscoveryReady() { + Settings settings = Settings.builder() + .put("discovery.type", "ec2") + .build(); + boolean discoveryReady = Ec2Module.isEc2DiscoveryActive(settings, logger); + assertThat(discoveryReady, is(true)); + } + + public void testDiscoveryNotReady() { + Settings settings = Settings.EMPTY; + boolean discoveryReady = Ec2Module.isEc2DiscoveryActive(settings, logger); + assertThat(discoveryReady, is(false)); + } + +} From 433774fa6a6a6a722fb616f1eb332e65cf832469 Mon Sep 17 00:00:00 2001 From: mikemccand Date: Wed, 30 Sep 2015 12:59:00 +0200 Subject: [PATCH 02/35] close TokenStream in finally --- .../elasticsearch/search/suggest/SuggestUtils.java | 12 ++++++------ .../suggest/phrase/NoisyChannelSpellChecker.java | 7 ++++--- .../search/suggest/phrase/PhraseSuggester.java | 12 +++++++----- 3 files changed, 17 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index bcf8cee64c2..6d2767c1f4e 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -116,12 +116,13 @@ public final class 
SuggestUtils { } public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { - TokenStream ts = analyzer.tokenStream( - field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length) - ); - return analyze(ts, consumer); + try (TokenStream ts = analyzer.tokenStream( + field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { + return analyze(ts, consumer); + } } - + + /** NOTE: caller must close the TokenStream */ public static int analyze(TokenStream stream, TokenConsumer consumer) throws IOException { stream.reset(); consumer.reset(stream); @@ -131,7 +132,6 @@ public final class SuggestUtils { numTokens++; } consumer.end(); - stream.close(); return numTokens; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index ec9ca6e1da2..ac03a736526 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -133,11 +133,12 @@ public final class NoisyChannelSpellChecker { public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerator generator, float maxErrors, int numCorrections, IndexReader reader, String analysisField, WordScorer scorer, float confidence, int gramSize) throws IOException { - - return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, scorer, confidence, gramSize); - + try (TokenStream ts = tokenStream(analyzer, query, new CharsRefBuilder(), analysisField)) { + return getCorrections(ts, generator, maxErrors, numCorrections, scorer, confidence, gramSize); + } } + /** NOTE: caller must close returned TokenStream */ public TokenStream tokenStream(Analyzer analyzer, BytesRef 
query, CharsRefBuilder spare, String field) throws IOException { spare.copyUTF8Bytes(query); return analyzer.tokenStream(field, new FastCharArrayReader(spare.chars(), 0, spare.length())); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index e7d0eb378c3..232b9e6d21c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -92,12 +92,14 @@ public final class PhraseSuggester extends Suggester { if (gens.size() > 0 && suggestTerms != null) { final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(), suggestion.getTokenLimit()); final BytesRef separator = suggestion.separator(); - TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField()); + Result checkerResult; + try (TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField())) { - WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); - Result checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), - gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), - suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); + WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); + checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), + gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), + suggestion.getShardSize(), wordScorer, suggestion.confidence(), 
suggestion.gramSize()); + } PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore); response.addTerm(resultEntry); From bb613bcacdb5d643ef5fc8820500599db1e083b0 Mon Sep 17 00:00:00 2001 From: mikemccand Date: Wed, 30 Sep 2015 15:44:35 +0200 Subject: [PATCH 03/35] move close responsibility back down to SuggestUtils.analyze --- .../search/suggest/SuggestUtils.java | 37 +++++++++++++------ .../phrase/NoisyChannelSpellChecker.java | 7 ++-- .../suggest/phrase/PhraseSuggester.java | 12 +++--- 3 files changed, 33 insertions(+), 23 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 6d2767c1f4e..ce994d0993c 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -28,6 +28,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; +import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.automaton.LevenshteinAutomata; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; @@ -116,22 +117,34 @@ public final class SuggestUtils { } public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { - try (TokenStream ts = analyzer.tokenStream( - field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { - return analyze(ts, consumer); - } + TokenStream ts = analyzer.tokenStream( + field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length) + ); + return analyze(ts, consumer); } - - /** NOTE: caller must close the TokenStream */ + + /** NOTE: this method closes the TokenStream, even on exception, which is awkward + * because really 
the caller who called {@link Analyzer#tokenStream} should close it, + * but when trying that there are recursion issues when we try to use the same + * TokenStrem twice in the same recursion... */ public static int analyze(TokenStream stream, TokenConsumer consumer) throws IOException { - stream.reset(); - consumer.reset(stream); int numTokens = 0; - while (stream.incrementToken()) { - consumer.nextToken(); - numTokens++; + boolean success = false; + try { + stream.reset(); + consumer.reset(stream); + while (stream.incrementToken()) { + consumer.nextToken(); + numTokens++; + } + consumer.end(); + } finally { + if (success) { + stream.close(); + } else { + IOUtils.closeWhileHandlingException(stream); + } } - consumer.end(); return numTokens; } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index ac03a736526..ec9ca6e1da2 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -133,12 +133,11 @@ public final class NoisyChannelSpellChecker { public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerator generator, float maxErrors, int numCorrections, IndexReader reader, String analysisField, WordScorer scorer, float confidence, int gramSize) throws IOException { - try (TokenStream ts = tokenStream(analyzer, query, new CharsRefBuilder(), analysisField)) { - return getCorrections(ts, generator, maxErrors, numCorrections, scorer, confidence, gramSize); - } + + return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, scorer, confidence, gramSize); + } - /** NOTE: caller must close returned TokenStream */ public TokenStream tokenStream(Analyzer analyzer, BytesRef query, CharsRefBuilder spare, String 
field) throws IOException { spare.copyUTF8Bytes(query); return analyzer.tokenStream(field, new FastCharArrayReader(spare.chars(), 0, spare.length())); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 232b9e6d21c..e7d0eb378c3 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -92,14 +92,12 @@ public final class PhraseSuggester extends Suggester { if (gens.size() > 0 && suggestTerms != null) { final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(), suggestion.getTokenLimit()); final BytesRef separator = suggestion.separator(); - Result checkerResult; - try (TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField())) { + TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField()); - WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); - checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), - gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), - suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); - } + WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); + Result checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), + gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), + suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); PhraseSuggestion.Entry 
resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore); response.addTerm(resultEntry); From dc01450d955fe7cdba5228ed27fbc7aa9795010f Mon Sep 17 00:00:00 2001 From: mikemccand Date: Wed, 30 Sep 2015 17:37:26 +0200 Subject: [PATCH 04/35] cutover more Analyzer.tokenStream to try-with-resources --- .../classic/MapperQueryParser.java | 93 +++++++++++-------- .../analyzing/XAnalyzingSuggester.java | 6 +- .../analyze/TransportAnalyzeAction.java | 7 +- .../index/analysis/Analysis.java | 4 +- .../mapper/core/TokenCountFieldMapper.java | 13 +-- .../MultiDocumentPercolatorIndex.java | 9 +- .../SingleDocumentPercolatorIndex.java | 9 +- .../search/highlight/PlainHighlighter.java | 30 +++--- .../search/suggest/SuggestUtils.java | 8 +- .../completion/CompletionTokenStream.java | 4 +- .../suggest/phrase/PhraseSuggester.java | 11 ++- .../core/TokenCountFieldMapperTests.java | 13 ++- 12 files changed, 112 insertions(+), 95 deletions(-) diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 3a61daeca12..916d049312c 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.search.*; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; @@ -484,30 +485,31 @@ public class MapperQueryParser extends QueryParser { if (!settings.analyzeWildcard()) { return super.getPrefixQuery(field, termStr); } + List tlist; // get Analyzer from superclass and tokenize the term - TokenStream source; + 
TokenStream source = null; try { - source = getAnalyzer().tokenStream(field, termStr); - source.reset(); - } catch (IOException e) { - return super.getPrefixQuery(field, termStr); - } - List tlist = new ArrayList<>(); - CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - - while (true) { try { - if (!source.incrementToken()) break; + source = getAnalyzer().tokenStream(field, termStr); + source.reset(); } catch (IOException e) { - break; + return super.getPrefixQuery(field, termStr); } - tlist.add(termAtt.toString()); - } + tlist = new ArrayList<>(); + CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - try { - source.close(); - } catch (IOException e) { - // ignore + while (true) { + try { + if (!source.incrementToken()) break; + } catch (IOException e) { + break; + } + tlist.add(termAtt.toString()); + } + } finally { + if (source != null) { + IOUtils.closeWhileHandlingException(source); + } } if (tlist.size() == 1) { @@ -619,21 +621,30 @@ public class MapperQueryParser extends QueryParser { if (isWithinToken) { try { TokenStream source = getAnalyzer().tokenStream(field, tmp.toString()); - source.reset(); - CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - if (source.incrementToken()) { - String term = termAtt.toString(); - if (term.length() == 0) { + boolean success = false; + try { + source.reset(); + CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); + if (source.incrementToken()) { + String term = termAtt.toString(); + if (term.length() == 0) { + // no tokens, just use what we have now + aggStr.append(tmp); + } else { + aggStr.append(term); + } + } else { // no tokens, just use what we have now aggStr.append(tmp); - } else { - aggStr.append(term); } - } else { - // no tokens, just use what we have now - aggStr.append(tmp); + success = true; + } finally { + if (success) { + source.close(); + } else { + IOUtils.close(source); + } } - source.close(); } catch 
(IOException e) { aggStr.append(tmp); } @@ -648,22 +659,22 @@ public class MapperQueryParser extends QueryParser { } if (isWithinToken) { try { - TokenStream source = getAnalyzer().tokenStream(field, tmp.toString()); - source.reset(); - CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - if (source.incrementToken()) { - String term = termAtt.toString(); - if (term.length() == 0) { + try (TokenStream source = getAnalyzer().tokenStream(field, tmp.toString())) { + source.reset(); + CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); + if (source.incrementToken()) { + String term = termAtt.toString(); + if (term.length() == 0) { + // no tokens, just use what we have now + aggStr.append(tmp); + } else { + aggStr.append(term); + } + } else { // no tokens, just use what we have now aggStr.append(tmp); - } else { - aggStr.append(term); } - } else { - // no tokens, just use what we have now - aggStr.append(tmp); } - source.close(); } catch (IOException e) { aggStr.append(tmp); } diff --git a/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java b/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java index 5db4f932c67..b2b23a29981 100644 --- a/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java +++ b/core/src/main/java/org/apache/lucene/search/suggest/analyzing/XAnalyzingSuggester.java @@ -959,11 +959,9 @@ public long ramBytesUsed() { // TODO: is there a Reader from a CharSequence? 
// Turn tokenstream into automaton: Automaton automaton = null; - TokenStream ts = queryAnalyzer.tokenStream("", key.toString()); - try { + + try (TokenStream ts = queryAnalyzer.tokenStream("", key.toString())) { automaton = getTokenStreamToAutomaton().toAutomaton(ts); - } finally { - IOUtils.closeWhileHandlingException(ts); } automaton = replaceSep(automaton); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 42d05ea4637..4f7a605341e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -217,12 +217,10 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokens = new ArrayList<>(); - TokenStream stream = null; int lastPosition = -1; int lastOffset = 0; for (String text : request.text()) { - try { - stream = analyzer.tokenStream(field, text); + try (TokenStream stream = analyzer.tokenStream(field, text)) { stream.reset(); CharTermAttribute term = stream.addAttribute(CharTermAttribute.class); PositionIncrementAttribute posIncr = stream.addAttribute(PositionIncrementAttribute.class); @@ -243,11 +241,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction> and pass previous, // like the indexer does - TokenStream tokenStream = field.tokenStream(analyzer, null); - if (tokenStream != null) { - memoryIndex.addField(field.name(), tokenStream, field.boost()); - } + try (TokenStream tokenStream = field.tokenStream(analyzer, null)) { + if (tokenStream != null) { + memoryIndex.addField(field.name(), tokenStream, field.boost()); + } + } } catch (IOException e) { throw new ElasticsearchException("Failed to create token stream", e); } diff --git a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java 
b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java index 3233cdcd756..1271872cab6 100644 --- a/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java +++ b/core/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java @@ -56,10 +56,11 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex { Analyzer analyzer = context.mapperService().documentMapper(parsedDocument.type()).mappers().indexAnalyzer(); // TODO: instead of passing null here, we can have a CTL> and pass previous, // like the indexer does - TokenStream tokenStream = field.tokenStream(analyzer, null); - if (tokenStream != null) { - memoryIndex.addField(field.name(), tokenStream, field.boost()); - } + try (TokenStream tokenStream = field.tokenStream(analyzer, null)) { + if (tokenStream != null) { + memoryIndex.addField(field.name(), tokenStream, field.boost()); + } + } } catch (Exception e) { throw new ElasticsearchException("Failed to create token stream for [" + field.name() + "]", e); } diff --git a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index d50c53a1380..041ed754d76 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.TextFragment; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; @@ -109,15 +110,16 @@ public class PlainHighlighter implements Highlighter { for (Object textToHighlight : textsToHighlight) { String text = textToHighlight.toString(); - 
TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text); - if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { - // can't perform highlighting if the stream has no terms (binary token stream) or no offsets - continue; - } - TextFragment[] bestTextFragments = entry.getBestTextFragments(tokenStream, text, false, numberOfFragments); - for (TextFragment bestTextFragment : bestTextFragments) { - if (bestTextFragment != null && bestTextFragment.getScore() > 0) { - fragsList.add(bestTextFragment); + try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text)) { + if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { + // can't perform highlighting if the stream has no terms (binary token stream) or no offsets + continue; + } + TextFragment[] bestTextFragments = entry.getBestTextFragments(tokenStream, text, false, numberOfFragments); + for (TextFragment bestTextFragment : bestTextFragments) { + if (bestTextFragment != null && bestTextFragment.getScore() > 0) { + fragsList.add(bestTextFragment); + } } } } @@ -165,7 +167,7 @@ public class PlainHighlighter implements Highlighter { String fieldContents = textsToHighlight.get(0).toString(); int end; try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer.tokenStream(mapper.fieldType().names().indexName(), fieldContents)); + end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, mapper.fieldType().names().indexName(), fieldContents); } catch (Exception e) { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } @@ -181,8 +183,8 @@ public class PlainHighlighter implements Highlighter { return true; } - private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, TokenStream tokenStream) throws IOException { - try { + private static int 
findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer analyzer, String fieldName, String contents) throws IOException { + try (TokenStream tokenStream = analyzer.tokenStream(fieldName, contents)) { if (!tokenStream.hasAttribute(OffsetAttribute.class)) { // Can't split on term boundaries without offsets return -1; @@ -200,11 +202,9 @@ public class PlainHighlighter implements Highlighter { } end = attr.endOffset(); } + tokenStream.end(); // We've exhausted the token stream so we should just highlight everything. return end; - } finally { - tokenStream.end(); - tokenStream.close(); } } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index ce994d0993c..8dd193f6c24 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -117,10 +117,10 @@ public final class SuggestUtils { } public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { - TokenStream ts = analyzer.tokenStream( - field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length) - ); - return analyze(ts, consumer); + try (TokenStream ts = analyzer.tokenStream( + field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { + return analyze(ts, consumer); + } } /** NOTE: this method closes the TokenStream, even on exception, which is awkward diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionTokenStream.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionTokenStream.java index ebcf0456f87..5edf848dda3 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionTokenStream.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionTokenStream.java @@ -100,9 +100,7 @@ public 
final class CompletionTokenStream extends TokenStream { @Override public void close() throws IOException { - if (posInc == -1) { - input.close(); - } + input.close(); } public static interface ToFiniteStrings { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index e7d0eb378c3..724e3d40e25 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -92,12 +92,13 @@ public final class PhraseSuggester extends Suggester { if (gens.size() > 0 && suggestTerms != null) { final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(), suggestion.getTokenLimit()); final BytesRef separator = suggestion.separator(); - TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField()); - WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); - Result checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), - gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), - suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); + Result checkerResult; + try (TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField())) { + checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), + gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), + suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); + } PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, 
checkerResult.cutoffScore); response.addTerm(resultEntry); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index 818366647d1..5a644e56f48 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.index.mapper.core; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.common.xcontent.XContentFactory; @@ -87,7 +89,14 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { int finalTokenIncrement = 4; // Count the final token increment on the rare token streams that have them Token[] tokens = new Token[] {t1, t2, t3}; Collections.shuffle(Arrays.asList(tokens), getRandom()); - TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens); - assertThat(TokenCountFieldMapper.countPositions(tokenStream), equalTo(7)); + final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens); + // TODO: we have no CannedAnalyzer? 
+ Analyzer analyzer = new Analyzer() { + @Override + public TokenStreamComponents createComponents(String fieldName) { + return new TokenStreamComponents(new MockTokenizer(), tokenStream); + } + }; + assertThat(TokenCountFieldMapper.countPositions(analyzer, "", ""), equalTo(7)); } } From a321300e9c1faeb67fa0fe0a906c968352fae873 Mon Sep 17 00:00:00 2001 From: mikemccand Date: Wed, 30 Sep 2015 17:46:22 +0200 Subject: [PATCH 05/35] another try-with-resources --- .../classic/MapperQueryParser.java | 31 ++++++------------- 1 file changed, 10 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 916d049312c..ca1524f1214 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -619,31 +619,20 @@ public class MapperQueryParser extends QueryParser { char c = termStr.charAt(i); if (c == '?' 
|| c == '*') { if (isWithinToken) { - try { - TokenStream source = getAnalyzer().tokenStream(field, tmp.toString()); - boolean success = false; - try { - source.reset(); - CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); - if (source.incrementToken()) { - String term = termAtt.toString(); - if (term.length() == 0) { - // no tokens, just use what we have now - aggStr.append(tmp); - } else { - aggStr.append(term); - } - } else { + try (TokenStream source = getAnalyzer().tokenStream(field, tmp.toString())) { + source.reset(); + CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class); + if (source.incrementToken()) { + String term = termAtt.toString(); + if (term.length() == 0) { // no tokens, just use what we have now aggStr.append(tmp); - } - success = true; - } finally { - if (success) { - source.close(); } else { - IOUtils.close(source); + aggStr.append(term); } + } else { + // no tokens, just use what we have now + aggStr.append(tmp); } } catch (IOException e) { aggStr.append(tmp); From cb0003ff6c4fab8fe3518f2bb324950db62380ba Mon Sep 17 00:00:00 2001 From: Jason Veatch Date: Thu, 1 Oct 2015 15:21:34 -0400 Subject: [PATCH 06/35] Clarify that aliases and indices can't share names --- docs/reference/indices/aliases.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 7312de48c5c..9a65c89837d 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -8,7 +8,7 @@ converting the alias name to the actual index name. An alias can also be mapped to more than one index, and when specifying it, the alias will automatically expand to the aliases indices. An alias can also be associated with a filter that will automatically be applied when -searching, and routing values. +searching, and routing values. An alias cannot have the same name as an index. 
Here is a sample of associating the alias `alias1` with index `test1`: From 8782c8e08df5917baa97bf1168f7fc994eb60f02 Mon Sep 17 00:00:00 2001 From: Jose Diaz-Gonzalez Date: Thu, 1 Oct 2015 16:34:19 -0400 Subject: [PATCH 07/35] Update link to Jepsen related test class --- docs/resiliency/index.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc index 7ca7cf943ec..14fadeb35b7 100644 --- a/docs/resiliency/index.asciidoc +++ b/docs/resiliency/index.asciidoc @@ -189,7 +189,7 @@ Make write calls return the number of total/successful/missing shards in the sam [float] === Jepsen Test Failures (STATUS: ONGOING) -We have increased our test coverage to include scenarios tested by Jepsen. We make heavy use of randomization to expand on the scenarios that can be tested and to introduce new error conditions. You can follow the work on the master branch of the https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsTests.java[`DiscoveryWithServiceDisruptions` class], where we will add more tests as time progresses. +We have increased our test coverage to include scenarios tested by Jepsen. We make heavy use of randomization to expand on the scenarios that can be tested and to introduce new error conditions. You can follow the work on the master branch of the https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java[`DiscoveryWithServiceDisruptionsIT` class], where we will add more tests as time progresses. 
[float] === Document guarantees and handling of failure (STATUS: ONGOING) From 8788516e1b0c9727e338b0edfeed97f69af516de Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 2 Oct 2015 16:23:01 +0200 Subject: [PATCH 08/35] Merge pull request #13868 from hafkensite/patch-1 Update example with parameters --- docs/reference/indices/optimize.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/indices/optimize.asciidoc b/docs/reference/indices/optimize.asciidoc index acb08c45d44..799f0674fec 100644 --- a/docs/reference/indices/optimize.asciidoc +++ b/docs/reference/indices/optimize.asciidoc @@ -20,7 +20,7 @@ $ curl -XPOST 'http://localhost:9200/twitter/_optimize' [[optimize-parameters]] === Request Parameters -The optimize API accepts the following request parameters: +The optimize API accepts the following request parameters as query arguments: [horizontal] `max_num_segments`:: The number of segments to optimize to. To fully @@ -48,5 +48,5 @@ call, or even on `_all` the indices. 
-------------------------------------------------- $ curl -XPOST 'http://localhost:9200/kimchy,elasticsearch/_optimize' -$ curl -XPOST 'http://localhost:9200/_optimize' +$ curl -XPOST 'http://localhost:9200/_optimize?only_expunge_deletes=true' -------------------------------------------------- From 5f1b867158b134aa26d3a3f7def1a82fbacef1c4 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 2 Oct 2015 16:43:53 +0200 Subject: [PATCH 09/35] Update get-settings.asciidoc Fixed docs for filtering index settings in get-settings API Closes #13872 --- docs/reference/indices/get-settings.asciidoc | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/docs/reference/indices/get-settings.asciidoc b/docs/reference/indices/get-settings.asciidoc index a5950c2ee4b..4689c448b56 100644 --- a/docs/reference/indices/get-settings.asciidoc +++ b/docs/reference/indices/get-settings.asciidoc @@ -28,23 +28,12 @@ curl -XGET 'http://localhost:9200/2013-*/_settings' -------------------------------------------------- [float] -=== Prefix option +=== Filtering settings by name -There is also support for a `prefix` query string option -that allows to include only settings matches the specified prefix. +The settings that are returned can be filtered with wildcard matching +as follows: [source,js] -------------------------------------------------- -curl -XGET 'http://localhost:9200/my-index/_settings?prefix=index.' - -curl -XGET 'http://localhost:9200/_all/_settings?prefix=index.routing.allocation.' 
- -curl -XGET 'http://localhost:9200/2013-*/_settings?name=index.merge.*' - -curl -XGET 'http://localhost:9200/2013-*/_settings/index.merge.*' +curl -XGET 'http://localhost:9200/2013-*/_settings/name=index.number_*' -------------------------------------------------- - -The first example returns all index settings the start with `index.` in the index `my-index`, -the second example gets all index settings that start with `index.routing.allocation.` for -all indices, lastly the third example returns all index settings that start with `index.merge.` -in indices that start with `2013-`. From a5f9cd98d8f0bc2086960c1dfe084c22bc2117e4 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 2 Oct 2015 17:42:40 +0200 Subject: [PATCH 10/35] Remove and forbid use of com.google.common.o.Resources This commit removes and now forbids all uses of com.google.common.io.Resources across the codebase. This is one of the few remaining steps in the eventual removal of Guava as a dependency. Relates #13224 --- .../search/suggest/SuggestSearchIT.java | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index e55a736a1de..85993fdf812 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -19,14 +19,13 @@ package org.elasticsearch.search.suggest; -import java.nio.charset.StandardCharsets; -import com.google.common.io.Resources; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.*; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; +import 
org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; @@ -38,6 +37,9 @@ import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.junit.Test; import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.*; import java.util.concurrent.ExecutionException; @@ -470,7 +472,7 @@ public class SuggestSearchIT extends ESIntegTestCase { @Test @Nightly - public void testMarvelHerosPhraseSuggest() throws IOException { + public void testMarvelHerosPhraseSuggest() throws IOException, URISyntaxException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.reverse.tokenizer", "standard") @@ -506,7 +508,7 @@ public class SuggestSearchIT extends ESIntegTestCase { assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); - for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) { + for (String line : readMarvelHeroNames()) { index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line); } refresh(); @@ -597,7 +599,11 @@ public class SuggestSearchIT extends ESIntegTestCase { // Check the name this time because we're repeating it which is funky assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel")); } - + + private List readMarvelHeroNames() throws IOException, URISyntaxException { + return Files.readAllLines(PathUtils.get(SuggestSearchIT.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8); + } + @Test public void testSizePararm() throws IOException { CreateIndexRequestBuilder builder = 
prepareCreate("test").setSettings(settingsBuilder() @@ -666,7 +672,7 @@ public class SuggestSearchIT extends ESIntegTestCase { @Test @Nightly - public void testPhraseBoundaryCases() throws IOException { + public void testPhraseBoundaryCases() throws IOException, URISyntaxException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard .put("index.analysis.analyzer.body.tokenizer", "standard") @@ -698,7 +704,7 @@ public class SuggestSearchIT extends ESIntegTestCase { assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); - for (String line: Resources.readLines(SuggestSearchIT.class.getResource("/config/names.txt"), StandardCharsets.UTF_8)) { + for (String line : readMarvelHeroNames()) { index("test", "type1", line, "body", line, "bigram", line, "ngram", line); } refresh(); From 7447eb9842e46a5245d681e491bd0db8c9f4c7ce Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 2 Oct 2015 18:27:30 +0200 Subject: [PATCH 11/35] Remove and forbid use of com.google.common.collect.ImmutableCollection This commit removes and now forbids all uses of com.google.common.collect.ImmutableCollection across the codebase. This is one of the final steps in the eventual removal of Guava as a dependency. 
Relates #13224 --- .../org/elasticsearch/index/similarity/Similarities.java | 5 +++-- .../org/elasticsearch/script/ScriptContextRegistry.java | 9 ++------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/similarity/Similarities.java b/core/src/main/java/org/elasticsearch/index/similarity/Similarities.java index 4dbdca1e6e4..b40acb8a840 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/Similarities.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/Similarities.java @@ -19,12 +19,13 @@ package org.elasticsearch.index.similarity; -import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableMap; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.elasticsearch.common.collect.MapBuilder; +import java.util.Collection; + /** * Cache of pre-defined Similarities */ @@ -49,7 +50,7 @@ public class Similarities { * * @return Pre-defined SimilarityProvider Factories */ - public static ImmutableCollection listFactories() { + public static Collection listFactories() { return PRE_BUILT_SIMILARITIES.values(); } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptContextRegistry.java b/core/src/main/java/org/elasticsearch/script/ScriptContextRegistry.java index bf2b6679f3e..10a1c4266f7 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptContextRegistry.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptContextRegistry.java @@ -19,14 +19,9 @@ package org.elasticsearch.script; -import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableMap; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; +import java.util.*; import static java.util.Collections.unmodifiableSet; @@ -58,7 +53,7 @@ public final class ScriptContextRegistry { /** * 
@return a list that contains all the supported {@link ScriptContext}s, both standard ones and registered via plugins */ - ImmutableCollection scriptContexts() { + Collection scriptContexts() { return scriptContexts.values(); } From 8434c79429410864d4cb4a590587467e178969f2 Mon Sep 17 00:00:00 2001 From: xuzha Date: Fri, 2 Oct 2015 00:30:39 -0700 Subject: [PATCH 12/35] Update `cat allocation` doc --- docs/reference/cat/allocation.asciidoc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/cat/allocation.asciidoc b/docs/reference/cat/allocation.asciidoc index 6fbdd9d43cc..4c354a814d9 100644 --- a/docs/reference/cat/allocation.asciidoc +++ b/docs/reference/cat/allocation.asciidoc @@ -7,10 +7,10 @@ and how much disk space they are using. [source,sh] -------------------------------------------------- % curl '192.168.56.10:9200/_cat/allocation?v' -shards diskUsed diskAvail diskRatio ip node - 1 5.6gb 72.2gb 7.8% 192.168.56.10 Jarella - 1 5.6gb 72.2gb 7.8% 192.168.56.30 Solarr - 1 5.5gb 72.3gb 7.6% 192.168.56.20 Adam II +shards disk.indices disk.used disk.avail disk.total disk.percent host ip node + 1 3.1gb 5.6gb 72.2gb 77.8gb 7.8 192.168.56.10 192.168.56.10 Jarella + 1 3.1gb 5.6gb 72.2gb 77.8gb 7.8 192.168.56.30 192.168.56.30 Solarr + 1 3.0gb 5.5gb 72.3gb 77.8gb 7.6 192.168.56.20 192.168.56.20 Adam II -------------------------------------------------- Here we can see that each node has been allocated a single shard and From 04e892634e488cef29d622006255fc9ad283d241 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 2 Oct 2015 01:24:18 +0200 Subject: [PATCH 13/35] Verify Checksum once it has been fully written to fail as soon as possible Today we are relying on calling Store.verify on the closed stream to validate the checksum. 
This is still necessary to catch file truncation but for an actually corrupted file or checksum we can fail early and check the checksum against the actual metadata once it's been fully written to the VerifyingIndexOutput. --- .../java/org/elasticsearch/index/store/Store.java | 3 +++ .../indices/recovery/RecoverySourceHandler.java | 1 - .../org/elasticsearch/index/store/StoreTests.java | 11 +++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 3d158ff925e..b82973d8238 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -1292,6 +1292,9 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref final int index = Math.toIntExact(writtenBytes - checksumPosition); if (index < footerChecksum.length) { footerChecksum[index] = b; + if (index == footerChecksum.length-1) { + verify();// we have recorded the entire checksum + } } else { verify(); // fail if we write more than expected throw new AssertionError("write past EOF expected length: " + metadata.length() + " writtenBytes: " + writtenBytes); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index d9638c51f47..854546f656d 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -674,7 +674,6 @@ public class RecoverySourceHandler { try (final OutputStream outputStream = outputStreamFactory.apply(md); final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READONCE)) { Streams.copy(new InputStreamIndexInput(indexInput, md.length()), outputStream); - Store.verify(indexInput); } return null; 
}); diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 9386b6a20c0..144deddcc37 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -184,6 +184,7 @@ public class StoreTests extends ESTestCase { BytesRef ref = new BytesRef(scaledRandomIntBetween(1, 1024)); long length = indexInput.length(); IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo1.bar", length, checksum), dir.createOutput("foo1.bar", IOContext.DEFAULT)); + length -= 8; // we write the checksum in the try / catch block below while (length > 0) { if (random().nextInt(10) == 0) { verifyingOutput.writeByte(indexInput.readByte()); @@ -197,6 +198,16 @@ public class StoreTests extends ESTestCase { } try { + BytesRef checksumBytes = new BytesRef(8); + checksumBytes.length = 8; + indexInput.readBytes(checksumBytes.bytes, checksumBytes.offset, checksumBytes.length); + if (randomBoolean()) { + verifyingOutput.writeBytes(checksumBytes.bytes, checksumBytes.offset, checksumBytes.length); + } else { + for (int i = 0; i < checksumBytes.length; i++) { + verifyingOutput.writeByte(checksumBytes.bytes[i]); + } + } if (randomBoolean()) { appendRandomData(verifyingOutput); } else { From aa4a63354bfbbd825cabc2a091a330aa32533502 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 2 Oct 2015 16:57:44 -0400 Subject: [PATCH 14/35] Forbid use of com.google.common.io.Resources --- dev-tools/src/main/resources/forbidden/all-signatures.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/dev-tools/src/main/resources/forbidden/all-signatures.txt b/dev-tools/src/main/resources/forbidden/all-signatures.txt index c70ab262d07..68c6ea7317b 100644 --- a/dev-tools/src/main/resources/forbidden/all-signatures.txt +++ b/dev-tools/src/main/resources/forbidden/all-signatures.txt @@ -129,6 +129,7 @@ 
com.google.common.io.Files com.google.common.primitives.Ints com.google.common.collect.ImmutableSet com.google.common.collect.ImmutableSet$Builder +com.google.common.io.Resources @defaultMessage Do not violate java's access system java.lang.reflect.AccessibleObject#setAccessible(boolean) From 03a4e226f15f82765b37ea6398f7c71e2fba8e1f Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Sun, 27 Sep 2015 11:44:26 +0200 Subject: [PATCH 15/35] Snapshot restore operations throttle more than specified Lucene's RateLimiter can do too much sleeping on small values (see also #6018). The issue here is that calls to "pause" are not properly guarded in "restoreFile". Instead of simply adding the guard, this commit uses the RateLimitingInputStream similar as for "snapshotFile". Closes #13828 --- .../blobstore/BlobStoreIndexShardRepository.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index c9344d382c8..ef335e2cc9a 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -26,6 +26,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RateLimiter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterService; @@ -93,6 +94,8 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements private RateLimitingInputStream.Listener snapshotThrottleListener; + private RateLimitingInputStream.Listener restoreThrottleListener; + private boolean compress; private 
final ParseFieldMatcher parseFieldMatcher; @@ -147,6 +150,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements this.restoreRateLimiter = restoreRateLimiter; this.rateLimiterListener = rateLimiterListener; this.snapshotThrottleListener = nanos -> rateLimiterListener.onSnapshotPause(nanos); + this.restoreThrottleListener = nanos -> rateLimiterListener.onRestorePause(nanos); this.compress = compress; indexShardSnapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, BlobStoreIndexShardSnapshot.PROTO, parseFieldMatcher, isCompress()); indexShardSnapshotLegacyFormat = new LegacyBlobStoreFormat<>(LEGACY_SNAPSHOT_NAME_FORMAT, BlobStoreIndexShardSnapshot.PROTO, parseFieldMatcher); @@ -890,16 +894,20 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements */ private void restoreFile(final FileInfo fileInfo) throws IOException { boolean success = false; - try (InputStream stream = new PartSliceStream(blobContainer, fileInfo)) { + + try (InputStream partSliceStream = new PartSliceStream(blobContainer, fileInfo)) { + final InputStream stream; + if (restoreRateLimiter == null) { + stream = partSliceStream; + } else { + stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreThrottleListener); + } try (final IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) { final byte[] buffer = new byte[BUFFER_SIZE]; int length; while ((length = stream.read(buffer)) > 0) { indexOutput.writeBytes(buffer, 0, length); recoveryState.getIndex().addRecoveredBytesToFile(fileInfo.name(), length); - if (restoreRateLimiter != null) { - rateLimiterListener.onRestorePause(restoreRateLimiter.pause(length)); - } } Store.verify(indexOutput); indexOutput.close(); From b19be2c34aa6415cb744b2b7338e33e5d85c2165 Mon Sep 17 00:00:00 2001 From: xuzha Date: Wed, 30 Sep 2015 10:43:27 -0700 Subject: [PATCH 16/35] DiskThresholdDecider 
check data nodes number Right now, we allow allocation if there is only a single node in the cluster. it would be nice to fail open when there is only one data node (instead of only one node total). closes #9391 --- .../decider/DiskThresholdDecider.java | 8 +- .../decider/DiskThresholdDeciderTests.java | 133 +++++++++++++++++- 2 files changed, 135 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 9a6353a46f8..e1a0b777d66 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -598,12 +598,12 @@ public class DiskThresholdDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "disk threshold decider disabled"); } - // Allow allocation regardless if only a single node is available - if (allocation.nodes().size() <= 1) { + // Allow allocation regardless if only a single data node is available + if (allocation.nodes().dataNodes().size() <= 1) { if (logger.isTraceEnabled()) { - logger.trace("only a single node is present, allowing allocation"); + logger.trace("only a single data node is present, allowing allocation"); } - return allocation.decision(Decision.YES, NAME, "only a single node is present"); + return allocation.decision(Decision.YES, NAME, "only a single data node is present"); } // Fail open there is no info available diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index dfdd9ba5948..5852faf908e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java 
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -50,13 +50,11 @@ import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.junit.Test; -import java.util.AbstractMap; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.Set; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; @@ -912,6 +910,137 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().relocatingNodeId(), equalTo("node2")); } + public void testForSingleDataNode() { + Settings diskSettings = settingsBuilder() + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_INCLUDE_RELOCATIONS, true) + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, "60%") + .put(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK, "70%").build(); + + Map usages = new HashMap<>(); + usages.put("node1", new DiskUsage("node1", "n1", "/dev/null", 100, 100)); // 0% used + usages.put("node2", new DiskUsage("node2", "n2", "/dev/null", 100, 20)); // 80% used + usages.put("node3", new DiskUsage("node3", "n3", "/dev/null", 100, 100)); // 0% used + + // We have an index with 1 primary shards each taking 40 bytes. 
Each node has 100 bytes available + Map shardSizes = new HashMap<>(); + shardSizes.put("[test][0][p]", 40L); + shardSizes.put("[test][1][p]", 40L); + final ClusterInfo clusterInfo = new ClusterInfo(Collections.unmodifiableMap(usages), Collections.unmodifiableMap(usages), Collections.unmodifiableMap(shardSizes), MockInternalClusterInfoService.DEV_NULL_MAP); + + DiskThresholdDecider diskThresholdDecider = new DiskThresholdDecider(diskSettings); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + logger.info("--> adding one master node, one data node"); + Map masterNodeAttributes = new HashMap<>(); + masterNodeAttributes.put("master", "true"); + masterNodeAttributes.put("data", "false"); + Map dataNodeAttributes = new HashMap<>(); + dataNodeAttributes.put("master", "false"); + dataNodeAttributes.put("data", "true"); + DiscoveryNode discoveryNode1 = new DiscoveryNode("", "node1", new LocalTransportAddress("1"), masterNodeAttributes, Version.CURRENT); + DiscoveryNode discoveryNode2 = new DiscoveryNode("", "node2", new LocalTransportAddress("2"), dataNodeAttributes, Version.CURRENT); + + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(discoveryNode1).put(discoveryNode2).build(); + ClusterState baseClusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT) + .metaData(metaData) + .routingTable(routingTable) + .nodes(discoveryNodes) + .build(); + + // Two shards consumes 80% of disk space in data node, but we have only one data node, shards should remain. 
+ ShardRouting firstRouting = TestShardRouting.newShardRouting("test", 0, "node2", null, null, true, ShardRoutingState.STARTED, 1); + ShardRouting secondRouting = TestShardRouting.newShardRouting("test", 1, "node2", null, null, true, ShardRoutingState.STARTED, 1); + RoutingNode firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting)); + + RoutingTable.Builder builder = RoutingTable.builder().add( + IndexRoutingTable.builder("test") + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) + .addShard(firstRouting) + .build() + ) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) + .addShard(secondRouting) + .build() + ) + ); + ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder).build(); + RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), discoveryNodes, clusterInfo); + Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); + + // Two shards should start happily + assertThat(decision.type(), equalTo(Decision.Type.YES)); + ClusterInfoService cis = new ClusterInfoService() { + @Override + public ClusterInfo getClusterInfo() { + logger.info("--> calling fake getClusterInfo"); + return clusterInfo; + } + + @Override + public void addListener(Listener listener) { + } + }; + + AllocationDeciders deciders = new AllocationDeciders(Settings.EMPTY, new HashSet<>(Arrays.asList( + new SameShardAllocationDecider(Settings.EMPTY), diskThresholdDecider + ))); + + AllocationService strategy = new AllocationService(settingsBuilder() + .put("cluster.routing.allocation.concurrent_recoveries", 10) + .put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, "always") + .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1) + .build(), deciders, makeShardsAllocators(), cis); + RoutingAllocation.Result result = 
strategy.reroute(clusterState); + + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().state(), equalTo(STARTED)); + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().currentNodeId(), equalTo("node2")); + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().relocatingNodeId(), nullValue()); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().state(), equalTo(STARTED)); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().currentNodeId(), equalTo("node2")); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().relocatingNodeId(), nullValue()); + + // Add another datanode, it should relocate. + logger.info("--> adding node3"); + DiscoveryNode discoveryNode3 = new DiscoveryNode("", "node3", new LocalTransportAddress("3"), dataNodeAttributes, Version.CURRENT); + ClusterState updateClusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) + .put(discoveryNode3)).build(); + + firstRouting = TestShardRouting.newShardRouting("test", 0, "node2", null, null, true, ShardRoutingState.STARTED, 1); + secondRouting = TestShardRouting.newShardRouting("test", 1, "node2", "node3", null, true, ShardRoutingState.RELOCATING, 1); + firstRoutingNode = new RoutingNode("node2", discoveryNode2, Arrays.asList(firstRouting, secondRouting)); + builder = RoutingTable.builder().add( + IndexRoutingTable.builder("test") + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 0)) + .addShard(firstRouting) + .build() + ) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId("test", 1)) + .addShard(secondRouting) + .build() + ) + ); + + clusterState = ClusterState.builder(updateClusterState).routingTable(builder).build(); + routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), discoveryNodes, clusterInfo); + decision = 
diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); + assertThat(decision.type(), equalTo(Decision.Type.YES)); + + result = strategy.reroute(clusterState); + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().state(), equalTo(STARTED)); + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().currentNodeId(), equalTo("node2")); + assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().relocatingNodeId(), nullValue()); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().state(), equalTo(RELOCATING)); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().currentNodeId(), equalTo("node2")); + assertThat(result.routingTable().index("test").getShards().get(1).primaryShard().relocatingNodeId(), equalTo("node3")); + } + public void logShardStates(ClusterState state) { RoutingNodes rn = state.getRoutingNodes(); logger.info("--> counts: total: {}, unassigned: {}, initializing: {}, relocating: {}, started: {}", From 95406c470131ab8e01838e477ce9c93cd20de7f5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sun, 4 Oct 2015 21:11:02 +0200 Subject: [PATCH 17/35] Record all bytes of the checksum in VerifyingIndexOutput The fix in #13848 has an off by one issue where the first byte of the checksum was never written. Unfortunately most tests shadowed the problem and the first byte of the checksum seems to be very likely a 0 which causes only very rare failures. 
Relates to #13896 Relates to #13848 --- .../org/elasticsearch/index/store/Store.java | 21 ++++++------------- .../elasticsearch/index/store/StoreTests.java | 5 ----- 2 files changed, 6 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index b82973d8238..7fb1b40c460 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -1286,14 +1286,15 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref @Override public void writeByte(byte b) throws IOException { final long writtenBytes = this.writtenBytes++; - if (writtenBytes == checksumPosition) { - readAndCompareChecksum(); - } else if (writtenBytes > checksumPosition) { // we are writing parts of the checksum.... + if (writtenBytes >= checksumPosition) { // we are writing parts of the checksum.... + if (writtenBytes == checksumPosition) { + readAndCompareChecksum(); + } final int index = Math.toIntExact(writtenBytes - checksumPosition); if (index < footerChecksum.length) { footerChecksum[index] = b; if (index == footerChecksum.length-1) { - verify();// we have recorded the entire checksum + verify(); // we have recorded the entire checksum } } else { verify(); // fail if we write more than expected @@ -1315,16 +1316,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref @Override public void writeBytes(byte[] b, int offset, int length) throws IOException { if (writtenBytes + length > checksumPosition) { - if (actualChecksum == null) { - assert writtenBytes <= checksumPosition; - final int bytesToWrite = (int) (checksumPosition - writtenBytes); - out.writeBytes(b, offset, bytesToWrite); - readAndCompareChecksum(); - offset += bytesToWrite; - length -= bytesToWrite; - writtenBytes += bytesToWrite; - } - for (int i = 0; i < length; i++) { + for (int i = 0; i < length; 
i++) { // don't optimize writing the last block of bytes + writeByte(b[offset+i]); + } } else { @@ -1332,7 +1324,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref + writtenBytes += length; } } - } /** diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index 144deddcc37..11d01c95095 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -208,11 +208,6 @@ public class StoreTests extends ESTestCase { verifyingOutput.writeByte(checksumBytes.bytes[i]); } } - if (randomBoolean()) { - appendRandomData(verifyingOutput); - } else { - Store.verify(verifyingOutput); - } fail("should be a corrupted index"); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) { // ok From 96206dfd2a6f52afa2fc0b682af7a321149c9254 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sun, 4 Oct 2015 21:46:40 +0200 Subject: [PATCH 18/35] [TEST] Work around how OS / GIT handles line separator --- .../query/HasChildQueryBuilderTests.java | 31 ++++++++++++++++++- .../query/has-child-with-inner-hits.json | 30 ------------------ 2 files changed, 30 insertions(+), 31 deletions(-) delete mode 100644 core/src/test/resources/org/elasticsearch/index/query/has-child-with-inner-hits.json diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index b69758271a1..c567ba3a0f3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -190,7 +190,36 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase Date: Fri, 2 Oct 2015 17:18:26 +0200 Subject: [PATCH 19/35] Remove and forbid use of
com.google.common.hash.* This commit removes and now forbids all uses of com.google.common.hash.HashCode, com.google.common.hash.HashFunction, and com.google.common.hash.Hashing across the codebase. This is one of the few remaining steps in the eventual removal of Guava as a dependency. Relates #13224 --- .../common/hash/MessageDigests.java | 80 ++++++++++++++++++ .../http/client/HttpDownloadHelper.java | 13 +-- .../murmur3/Murmur3HashFunctionTests.java | 28 +++---- .../common/hash/MessageDigestsTests.java | 81 +++++++++++++++++++ .../common/hashing/MurmurHash3Tests.java | 45 +++++------ .../plugins/PluginManagerIT.java | 11 +-- .../resources/forbidden/all-signatures.txt | 3 + .../groovy/GroovyScriptEngineService.java | 10 +-- 8 files changed, 213 insertions(+), 58 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java create mode 100644 core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java diff --git a/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java b/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java new file mode 100644 index 00000000000..7dc495f9c3b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.hash; + +import org.elasticsearch.ElasticsearchException; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class MessageDigests { + + private static final MessageDigest MD5_DIGEST; + private static final MessageDigest SHA_1_DIGEST; + private static final MessageDigest SHA_256_DIGEST; + + static { + try { + MD5_DIGEST = MessageDigest.getInstance("MD5"); + SHA_1_DIGEST = MessageDigest.getInstance("SHA-1"); + SHA_256_DIGEST = MessageDigest.getInstance("SHA-256"); + } catch (NoSuchAlgorithmException e) { + throw new ElasticsearchException("Unexpected exception creating MessageDigest instance", e); + } + } + + public static MessageDigest md5() { + return cloneAndReset(MD5_DIGEST); + } + + + public static MessageDigest sha1() { + return cloneAndReset(SHA_1_DIGEST); + } + + public static MessageDigest sha256() { + return cloneAndReset(SHA_256_DIGEST); + } + + private static MessageDigest cloneAndReset(MessageDigest messageDigest) { + try { + MessageDigest clone = (MessageDigest) messageDigest.clone(); + clone.reset(); + return clone; + } catch (CloneNotSupportedException e) { + throw new ElasticsearchException("Unexpected exception cloning MessageDigest instance", e); + } + } + + private static final char[] HEX_DIGITS = "0123456789abcdef".toCharArray(); + public static String toHexString(byte[] bytes) { + if (bytes == null) { + throw new NullPointerException("bytes"); + } + StringBuilder sb = new StringBuilder(2 * bytes.length); + + for (int i = 0; i < bytes.length; i++) { + byte b = bytes[i]; + sb.append(HEX_DIGITS[b >> 4 & 0xf]).append(HEX_DIGITS[b & 0xf]); + } + + return sb.toString(); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java index 
ed2de6e5e7d..7fe26ed81d9 100644 --- a/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java +++ b/core/src/main/java/org/elasticsearch/common/http/client/HttpDownloadHelper.java @@ -19,19 +19,22 @@ package org.elasticsearch.common.http.client; -import java.nio.charset.StandardCharsets; -import com.google.common.hash.Hashing; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.*; +import org.elasticsearch.Build; +import org.elasticsearch.ElasticsearchCorruptionException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.Version; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.unit.TimeValue; import java.io.*; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLConnection; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; @@ -96,7 +99,7 @@ public class HttpDownloadHelper { public static Checksummer SHA1_CHECKSUM = new Checksummer() { @Override public String checksum(byte[] filebytes) { - return Hashing.sha1().hashBytes(filebytes).toString(); + return MessageDigests.toHexString(MessageDigests.sha1().digest(filebytes)); } @Override @@ -109,7 +112,7 @@ public class HttpDownloadHelper { public static Checksummer MD5_CHECKSUM = new Checksummer() { @Override public String checksum(byte[] filebytes) { - return Hashing.md5().hashBytes(filebytes).toString(); + return MessageDigests.toHexString(MessageDigests.md5().digest(filebytes)); } @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/operation/hash/murmur3/Murmur3HashFunctionTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/operation/hash/murmur3/Murmur3HashFunctionTests.java index 23b928db247..ed454aead0d 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/operation/hash/murmur3/Murmur3HashFunctionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/operation/hash/murmur3/Murmur3HashFunctionTests.java @@ -19,26 +19,24 @@ package org.elasticsearch.cluster.routing.operation.hash.murmur3; -import com.carrotsearch.randomizedtesting.generators.RandomInts; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import com.google.common.hash.HashFunction; -import com.google.common.hash.Hashing; import org.elasticsearch.cluster.routing.Murmur3HashFunction; import org.elasticsearch.test.ESTestCase; public class Murmur3HashFunctionTests extends ESTestCase { - public void test() { - // Make sure that we agree with guava - Murmur3HashFunction murmur3 = new Murmur3HashFunction(); - HashFunction guavaMurmur3 = Hashing.murmur3_32(); - for (int i = 0; i < 100; ++i) { - final String id = RandomStrings.randomRealisticUnicodeOfCodepointLength(getRandom(), RandomInts.randomIntBetween(getRandom(), 1, 20)); - //final String id = "0"; - final int hash1 = guavaMurmur3.newHasher().putUnencodedChars(id).hash().asInt(); - final int hash2 = murmur3.hash(id); - assertEquals(hash1, hash2); - } + private static Murmur3HashFunction HASH = new Murmur3HashFunction(); + + public void testKnownValues() { + assertHash(0x5a0cb7c3, "hell"); + assertHash(0xd7c31989, "hello"); + assertHash(0x22ab2984, "hello w"); + assertHash(0xdf0ca123, "hello wo"); + assertHash(0xe7744d61, "hello wor"); + assertHash(0xe07db09c, "The quick brown fox jumps over the lazy dog"); + assertHash(0x4e63d2ad, "The quick brown fox jumps over the lazy cog"); } + private static void assertHash(int expected, String stringInput) { + assertEquals(expected, HASH.hash(stringInput)); + } } diff --git a/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java b/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java new file mode 100644 index 00000000000..f8d39fa6bf6 --- 
/dev/null +++ b/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.hash; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.math.BigInteger; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; + +import static org.junit.Assert.*; + +public class MessageDigestsTests extends ESTestCase { + private void assertHash(String expected, String test, MessageDigest messageDigest) { + String actual = MessageDigests.toHexString(messageDigest.digest(test.getBytes(StandardCharsets.UTF_8))); + assertEquals(expected, actual); + } + + @Test + public void testMd5() throws Exception { + assertHash("d41d8cd98f00b204e9800998ecf8427e", "", MessageDigests.md5()); + assertHash("900150983cd24fb0d6963f7d28e17f72", "abc", MessageDigests.md5()); + assertHash("8215ef0796a20bcaaae116d3876c664a", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.md5()); + assertHash("7707d6ae4e027c70eea2a935c2296f21", new String(new char[1000000]).replace("\0", "a"), MessageDigests.md5()); + assertHash("9e107d9d372bb6826bd81d3542a419d6", "The quick brown fox jumps 
over the lazy dog", MessageDigests.md5()); + assertHash("1055d3e698d289f2af8663725127bd4b", "The quick brown fox jumps over the lazy cog", MessageDigests.md5()); + } + + @Test + public void testSha1() throws Exception { + assertHash("da39a3ee5e6b4b0d3255bfef95601890afd80709", "", MessageDigests.sha1()); + assertHash("a9993e364706816aba3e25717850c26c9cd0d89d", "abc", MessageDigests.sha1()); + assertHash("84983e441c3bd26ebaae4aa1f95129e5e54670f1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha1()); + assertHash("34aa973cd4c4daa4f61eeb2bdbad27316534016f", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha1()); + assertHash("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", "The quick brown fox jumps over the lazy dog", MessageDigests.sha1()); + assertHash("de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", "The quick brown fox jumps over the lazy cog", MessageDigests.sha1()); + } + + @Test + public void testSha256() throws Exception { + assertHash("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", "", MessageDigests.sha256()); + assertHash("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad", "abc", MessageDigests.sha256()); + assertHash("248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1", "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", MessageDigests.sha256()); + assertHash("cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0", new String(new char[1000000]).replace("\0", "a"), MessageDigests.sha256()); + assertHash("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", "The quick brown fox jumps over the lazy dog", MessageDigests.sha256()); + assertHash("e4c4d8f3bf76b692de791a173e05321150f7a345b46484fe427f6acc7ecc81be", "The quick brown fox jumps over the lazy cog", MessageDigests.sha256()); + } + + @Test + public void testToHexString() throws Exception { + for (int i = 0; i < 1024; i++) { + BigInteger expected = BigInteger.probablePrime(256, 
random()); + byte[] bytes = expected.toByteArray(); + String hex = MessageDigests.toHexString(bytes); + String zeros = new String(new char[bytes.length * 2]).replace("\0", "0"); + String expectedAsString = expected.toString(16); + String expectedHex = zeros.substring(expectedAsString.length()) + expectedAsString; + assertEquals(expectedHex, hex); + BigInteger actual = new BigInteger(hex, 16); + assertEquals(expected, actual); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/hashing/MurmurHash3Tests.java b/core/src/test/java/org/elasticsearch/common/hashing/MurmurHash3Tests.java index d9d4057f961..cbdfe9ce3aa 100644 --- a/core/src/test/java/org/elasticsearch/common/hashing/MurmurHash3Tests.java +++ b/core/src/test/java/org/elasticsearch/common/hashing/MurmurHash3Tests.java @@ -19,37 +19,34 @@ package org.elasticsearch.common.hashing; -import com.google.common.hash.HashCode; -import com.google.common.hash.Hashing; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.test.ESTestCase; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.LongBuffer; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; public class MurmurHash3Tests extends ESTestCase { - - public void testHash128() { - final int iters = scaledRandomIntBetween(100, 5000); - for (int i = 0; i < iters; ++i) { - final int seed = randomInt(); - final int offset = randomInt(20); - final int len = randomInt(randomBoolean() ? 
20 : 200); - final byte[] bytes = new byte[len + offset + randomInt(3)]; - getRandom().nextBytes(bytes); - HashCode h1 = Hashing.murmur3_128(seed).hashBytes(bytes, offset, len); - MurmurHash3.Hash128 h2 = MurmurHash3.hash128(bytes, offset, len, seed, new MurmurHash3.Hash128()); - assertEquals(h1, h2); - } + public void testKnownValues() throws UnsupportedEncodingException { + assertHash(0x629942693e10f867L, 0x92db0b82baeb5347L, "hell", 0); + assertHash(0xa78ddff5adae8d10L, 0x128900ef20900135L, "hello", 1); + assertHash(0x8a486b23f422e826L, 0xf962a2c58947765fL, "hello ", 2); + assertHash(0x2ea59f466f6bed8cL, 0xc610990acc428a17L, "hello w", 3); + assertHash(0x79f6305a386c572cL, 0x46305aed3483b94eL, "hello wo", 4); + assertHash(0xc2219d213ec1f1b5L, 0xa1d8e2e0a52785bdL, "hello wor", 5); + assertHash(0xe34bbc7bbc071b6cL, 0x7a433ca9c49a9347L, "The quick brown fox jumps over the lazy dog", 0); + assertHash(0x658ca970ff85269aL, 0x43fee3eaa68e5c3eL, "The quick brown fox jumps over the lazy cog", 0); } - private void assertEquals(HashCode h1, MurmurHash3.Hash128 h2) { - final LongBuffer longs = ByteBuffer.wrap(h1.asBytes()).order(ByteOrder.LITTLE_ENDIAN).asLongBuffer(); - assertEquals(2, longs.limit()); - assertEquals(h1.asLong(), h2.h1); - assertEquals(longs.get(), h2.h1); - assertEquals(longs.get(), h2.h2); + private static void assertHash(long lower, long upper, String inputString, long seed) { + byte[] bytes = inputString.getBytes(StandardCharsets.UTF_8); + MurmurHash3.Hash128 expected = new MurmurHash3.Hash128(); + expected.h1 = lower; + expected.h2 = upper; + assertHash(expected, MurmurHash3.hash128(bytes, 0, bytes.length, seed, new MurmurHash3.Hash128())); } + private static void assertHash(MurmurHash3.Hash128 expected, MurmurHash3.Hash128 actual) { + assertEquals(expected.h1, actual.h1); + assertEquals(expected.h2, actual.h2); + } } diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java 
b/core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java index c76bef55abe..1ffce8d71c2 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginManagerIT.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.plugins; -import java.nio.charset.StandardCharsets; -import com.google.common.hash.Hashing; - import org.apache.http.impl.client.HttpClients; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; @@ -28,6 +25,7 @@ import org.elasticsearch.common.Base64; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.CliTool.ExitStatus; import org.elasticsearch.common.cli.CliToolTestCase.CaptureOutputTerminal; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -46,16 +44,15 @@ import org.jboss.netty.handler.ssl.util.InsecureTrustManagerFactory; import org.jboss.netty.handler.ssl.util.SelfSignedCertificate; import org.junit.After; import org.junit.Before; -import org.junit.Test; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSocketFactory; - import java.io.BufferedWriter; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.Path; @@ -109,7 +106,7 @@ public class PluginManagerIT extends ESIntegTestCase { } private void writeSha1(Path file, boolean corrupt) throws IOException { - String sha1Hex = Hashing.sha1().hashBytes(Files.readAllBytes(file)).toString(); + String sha1Hex = MessageDigests.toHexString(MessageDigests.sha1().digest(Files.readAllBytes(file))); try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".sha1"), 
StandardCharsets.UTF_8)) { out.write(sha1Hex); if (corrupt) { @@ -119,7 +116,7 @@ public class PluginManagerIT extends ESIntegTestCase { } private void writeMd5(Path file, boolean corrupt) throws IOException { - String md5Hex = Hashing.md5().hashBytes(Files.readAllBytes(file)).toString(); + String md5Hex = MessageDigests.toHexString(MessageDigests.md5().digest(Files.readAllBytes(file))); try (BufferedWriter out = Files.newBufferedWriter(file.resolveSibling(file.getFileName() + ".md5"), StandardCharsets.UTF_8)) { out.write(md5Hex); if (corrupt) { diff --git a/dev-tools/src/main/resources/forbidden/all-signatures.txt b/dev-tools/src/main/resources/forbidden/all-signatures.txt index 68c6ea7317b..e57a86227cd 100644 --- a/dev-tools/src/main/resources/forbidden/all-signatures.txt +++ b/dev-tools/src/main/resources/forbidden/all-signatures.txt @@ -130,6 +130,9 @@ com.google.common.primitives.Ints com.google.common.collect.ImmutableSet com.google.common.collect.ImmutableSet$Builder com.google.common.io.Resources +com.google.common.hash.HashCode +com.google.common.hash.HashFunction +com.google.common.hash.Hashing @defaultMessage Do not violate java's access system java.lang.reflect.AccessibleObject#setAccessible(boolean) diff --git a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 935916149fb..cd478bef604 100644 --- a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -19,14 +19,9 @@ package org.elasticsearch.script.groovy; -import java.nio.charset.StandardCharsets; - -import com.google.common.hash.Hashing; - import groovy.lang.Binding; import groovy.lang.GroovyClassLoader; import groovy.lang.Script; - import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.Scorer; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; @@ -41,9 +36,9 @@ import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.control.customizers.CompilationCustomizer; import org.codehaus.groovy.control.customizers.ImportCustomizer; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; @@ -53,6 +48,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.HashMap; @@ -172,7 +168,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri if (sm != null) { sm.checkPermission(new SpecialPermission()); } - return loader.parseClass(script, Hashing.sha1().hashString(script, StandardCharsets.UTF_8).toString()); + return loader.parseClass(script, MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8)))); } catch (Throwable e) { if (logger.isTraceEnabled()) { logger.trace("exception compiling Groovy script:", e); From 8ff42834e9755ca64dacb71b13de75242128262a Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Sun, 4 Oct 2015 17:13:47 -0400 Subject: [PATCH 20/35] lock down javascript and python permissions --- .../bootstrap/BootstrapInfo.java | 9 +- .../org/elasticsearch/bootstrap/ESPolicy.java | 32 +++---- .../elasticsearch/bootstrap/security.policy | 4 +- .../{groovy.policy => untrusted.policy} | 4 +- .../bootstrap/ESPolicyTests.java | 9 +- .../bootstrap/MockPluginPolicy.java | 15 ++- 
.../groovy/GroovyScriptEngineService.java | 13 ++- .../JavaScriptScriptEngineService.java | 11 ++- .../javascript/JavaScriptSecurityTests.java | 89 ++++++++++++++++++ .../python/PythonScriptEngineService.java | 33 ++++++- .../script/python/PythonSecurityTests.java | 92 +++++++++++++++++++ 11 files changed, 279 insertions(+), 32 deletions(-) rename core/src/main/resources/org/elasticsearch/bootstrap/{groovy.policy => untrusted.policy} (90%) create mode 100644 plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java create mode 100644 plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java index 76485bb86e5..f1278af96f4 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java @@ -45,9 +45,16 @@ public final class BootstrapInfo { } /** - * Returns true if secure computing mode is enabled (linux/amd64 only) + * Returns true if secure computing mode is enabled (linux/amd64, OS X only) */ public static boolean isSeccompInstalled() { return Natives.isSeccompInstalled(); } + + /** + * codebase location for untrusted scripts (provide some additional safety) + *

+ * This is not a full URL, just a path. + */ + public static final String UNTRUSTED_CODEBASE = "/untrusted"; } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java index 9db66ca9c14..ae993f25814 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/ESPolicy.java @@ -26,29 +26,27 @@ import java.net.URL; import java.security.CodeSource; import java.security.Permission; import java.security.PermissionCollection; -import java.security.Permissions; import java.security.Policy; import java.security.ProtectionDomain; import java.security.URIParameter; -import java.util.PropertyPermission; /** custom policy for union of static and dynamic permissions */ final class ESPolicy extends Policy { /** template policy file, the one used in tests */ static final String POLICY_RESOURCE = "security.policy"; - /** limited policy for groovy scripts */ - static final String GROOVY_RESOURCE = "groovy.policy"; + /** limited policy for scripts */ + static final String UNTRUSTED_RESOURCE = "untrusted.policy"; final Policy template; - final Policy groovy; + final Policy untrusted; final PermissionCollection dynamic; public ESPolicy(PermissionCollection dynamic) throws Exception { URI policyUri = getClass().getResource(POLICY_RESOURCE).toURI(); - URI groovyUri = getClass().getResource(GROOVY_RESOURCE).toURI(); + URI untrustedUri = getClass().getResource(UNTRUSTED_RESOURCE).toURI(); this.template = Policy.getInstance("JavaPolicy", new URIParameter(policyUri)); - this.groovy = Policy.getInstance("JavaPolicy", new URIParameter(groovyUri)); + this.untrusted = Policy.getInstance("JavaPolicy", new URIParameter(untrustedUri)); this.dynamic = dynamic; } @@ -56,15 +54,17 @@ final class ESPolicy extends Policy { public boolean implies(ProtectionDomain domain, Permission permission) { CodeSource codeSource = domain.getCodeSource(); // codesource 
can be null when reducing privileges via doPrivileged() - if (codeSource != null) { - URL location = codeSource.getLocation(); - // location can be null... ??? nobody knows - // https://bugs.openjdk.java.net/browse/JDK-8129972 - if (location != null) { - // run groovy scripts with no permissions (except logging property) - if ("/groovy/script".equals(location.getFile())) { - return groovy.implies(domain, permission); - } + if (codeSource == null) { + return false; + } + + URL location = codeSource.getLocation(); + // location can be null... ??? nobody knows + // https://bugs.openjdk.java.net/browse/JDK-8129972 + if (location != null) { + // run scripts with limited permissions + if (BootstrapInfo.UNTRUSTED_CODEBASE.equals(location.getFile())) { + return untrusted.implies(domain, permission); } } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index ae2320feaf8..11268245670 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -69,8 +69,8 @@ grant codeBase "${es.security.plugin.lang-groovy}" { permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; // needed by GroovyScriptEngineService to close its classloader (why?) 
permission java.lang.RuntimePermission "closeClassLoader"; - // Allow executing groovy scripts with codesource of /groovy/script - permission groovy.security.GroovyCodeSourcePermission "/groovy/script"; + // Allow executing groovy scripts with codesource of /untrusted + permission groovy.security.GroovyCodeSourcePermission "/untrusted"; }; grant codeBase "${es.security.plugin.lang-javascript}" { diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/groovy.policy b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy similarity index 90% rename from core/src/main/resources/org/elasticsearch/bootstrap/groovy.policy rename to core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy index 4e1275827d9..2475c56e814 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/groovy.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy @@ -18,8 +18,8 @@ */ /* - * Limited security policy for groovy scripts. - * This is what is needed for its invokeDynamic functionality to work. + * Limited security policy for scripts. + * This is what is needed for invokeDynamic functionality to work. 
*/ grant { diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ESPolicyTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ESPolicyTests.java index 5423e68b555..b7ed195e0f9 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ESPolicyTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ESPolicyTests.java @@ -24,7 +24,9 @@ import org.elasticsearch.test.ESTestCase; import java.io.FilePermission; import java.security.AccessControlContext; import java.security.AccessController; +import java.security.AllPermission; import java.security.CodeSource; +import java.security.Permission; import java.security.PermissionCollection; import java.security.Permissions; import java.security.PrivilegedAction; @@ -48,8 +50,13 @@ public class ESPolicyTests extends ESTestCase { */ public void testNullCodeSource() throws Exception { assumeTrue("test cannot run with security manager", System.getSecurityManager() == null); + // create a policy with AllPermission + Permission all = new AllPermission(); + PermissionCollection allCollection = all.newPermissionCollection(); + allCollection.add(all); + ESPolicy policy = new ESPolicy(allCollection); + // restrict ourselves to NoPermission PermissionCollection noPermissions = new Permissions(); - ESPolicy policy = new ESPolicy(noPermissions); assertFalse(policy.implies(new ProtectionDomain(null, noPermissions), new FilePermission("foo", "read"))); } diff --git a/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java b/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java index bd366a28c13..c301ec78b33 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/MockPluginPolicy.java @@ -35,7 +35,6 @@ import java.security.ProtectionDomain; import java.security.cert.Certificate; import java.util.Collections; import java.util.HashSet; -import java.util.Objects; import java.util.Set; /** @@ -99,18 +98,24 @@ final 
class MockPluginPolicy extends Policy { excludedSources.add(RandomizedRunner.class.getProtectionDomain().getCodeSource()); // junit library excludedSources.add(Assert.class.getProtectionDomain().getCodeSource()); - // groovy scripts - excludedSources.add(new CodeSource(new URL("file:/groovy/script"), (Certificate[])null)); + // scripts + excludedSources.add(new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[])null)); Loggers.getLogger(getClass()).debug("Apply permissions [{}] excluding codebases [{}]", extraPermissions, excludedSources); } @Override public boolean implies(ProtectionDomain domain, Permission permission) { + CodeSource codeSource = domain.getCodeSource(); + // codesource can be null when reducing privileges via doPrivileged() + if (codeSource == null) { + return false; + } + if (standardPolicy.implies(domain, permission)) { return true; - } else if (excludedSources.contains(domain.getCodeSource()) == false && - Objects.toString(domain.getCodeSource()).contains("test-classes") == false) { + } else if (excludedSources.contains(codeSource) == false && + codeSource.toString().contains("test-classes") == false) { return extraPermissions.implies(permission); } else { return false; diff --git a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index cd478bef604..1644effb16d 100644 --- a/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/plugins/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -21,6 +21,7 @@ package org.elasticsearch.script.groovy; import groovy.lang.Binding; import groovy.lang.GroovyClassLoader; +import groovy.lang.GroovyCodeSource; import groovy.lang.Script; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; @@ -36,6 +37,8 @@ import 
org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.control.customizers.CompilationCustomizer; import org.codehaus.groovy.control.customizers.ImportCustomizer; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.bootstrap.BootstrapInfo; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.hash.MessageDigests; @@ -168,7 +171,15 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri if (sm != null) { sm.checkPermission(new SpecialPermission()); } - return loader.parseClass(script, MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8)))); + String fake = MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8))); + // same logic as GroovyClassLoader.parseClass() but with a different codesource string: + GroovyCodeSource gcs = AccessController.doPrivileged(new PrivilegedAction() { + public GroovyCodeSource run() { + return new GroovyCodeSource(script, fake, BootstrapInfo.UNTRUSTED_CODEBASE); + } + }); + gcs.setCachable(false); + return loader.parseClass(gcs); } catch (Throwable e) { if (logger.isTraceEnabled()) { logger.trace("exception compiling Groovy script:", e); diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index f2078971ab6..0adf9ca5f60 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -22,6 +22,7 @@ package org.elasticsearch.script.javascript; import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.search.Scorer; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -36,6 +37,10 @@ import org.mozilla.javascript.*; import org.mozilla.javascript.Script; import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.security.CodeSource; +import java.security.cert.Certificate; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; @@ -105,7 +110,11 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements try { ctx.setWrapFactory(wrapFactory); ctx.setOptimizationLevel(optimizationLevel); - return ctx.compileString(script, generateScriptName(), 1, null); + ctx.setSecurityController(new PolicySecurityController()); + return ctx.compileString(script, generateScriptName(), 1, + new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null)); + } catch (MalformedURLException e) { + throw new RuntimeException(e); } finally { Context.exit(); } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java new file mode 100644 index 00000000000..36636eb0cc4 --- /dev/null +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.javascript; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; +import org.mozilla.javascript.WrappedException; + +import java.util.HashMap; +import java.util.Map; + +/** + * Tests for the Javascript security permissions + */ +public class JavaScriptSecurityTests extends ESTestCase { + + private JavaScriptScriptEngineService se; + + @Before + public void setup() { + se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); + } + + @After + public void close() { + se.close(); + } + + /** runs a script */ + private void doTest(String script) { + Map vars = new HashMap(); + se.execute(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "js", se.compile(script)), vars); + } + + /** asserts that a script runs without exception */ + private void assertSuccess(String script) { + doTest(script); + } + + /** assert that a security exception is hit */ + private void assertFailure(String script) { + try { + doTest(script); + fail("did not get expected exception"); + } catch (WrappedException expected) { + Throwable cause = expected.getCause(); + assertNotNull(cause); + assertTrue("unexpected exception: " + cause, cause instanceof SecurityException); + } + } + + /** Test some javascripts that are ok */ + public void testOK() { + assertSuccess("1 + 2"); + assertSuccess("Math.cos(Math.PI)"); + } + + /** 
Test some javascripts that should hit security exception */ + public void testNotOK() { + // sanity check :) + assertFailure("java.lang.Runtime.getRuntime().halt(0)"); + // check a few things more restrictive than the ordinary policy + // no network + assertFailure("new java.net.Socket(\"localhost\", 1024)"); + // no files + assertFailure("java.io.File.createTempFile(\"test\", \"tmp\")"); + } +} diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index f4d83cf502d..87bfbb5af15 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -20,8 +20,11 @@ package org.elasticsearch.script.python; import java.io.IOException; +import java.security.AccessControlContext; import java.security.AccessController; +import java.security.Permissions; import java.security.PrivilegedAction; +import java.security.ProtectionDomain; import java.util.Map; import org.apache.lucene.index.LeafReaderContext; @@ -125,7 +128,8 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri public Object execute(CompiledScript compiledScript, Map vars) { PyObject pyVars = Py.java2py(vars); interp.setLocals(pyVars); - PyObject ret = interp.eval((PyCode) compiledScript.compiled()); + // eval the script with reduced privileges + PyObject ret = evalRestricted((PyCode) compiledScript.compiled()); if (ret == null) { return null; } @@ -171,7 +175,8 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri @Override public Object run() { interp.setLocals(pyVars); - PyObject ret = interp.eval(code); + // eval the script with reduced privileges + PyObject ret = evalRestricted(code); if (ret == null) { return null; } @@ -229,7 +234,8 @@ public class 
PythonScriptEngineService extends AbstractComponent implements Scri @Override public Object run() { interp.setLocals(pyVars); - PyObject ret = interp.eval(code); + // eval the script with reduced privileges + PyObject ret = evalRestricted(code); if (ret == null) { return null; } @@ -257,6 +263,27 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri } } + // we don't have a way to specify codesource for generated jython classes, + // so we just run them with a special context to reduce privileges + private static final AccessControlContext PY_CONTEXT; + static { + Permissions none = new Permissions(); + none.setReadOnly(); + PY_CONTEXT = new AccessControlContext(new ProtectionDomain[] { + new ProtectionDomain(null, none) + }); + } + + /** Evaluates with reduced privileges */ + private final PyObject evalRestricted(final PyCode code) { + // eval the script with reduced privileges + return AccessController.doPrivileged(new PrivilegedAction() { + @Override + public PyObject run() { + return interp.eval(code); + } + }, PY_CONTEXT); + } public static Object unwrapValue(Object value) { if (value == null) { diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java new file mode 100644 index 00000000000..745a109d5f1 --- /dev/null +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.python; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; +import org.python.core.PyException; + +import java.util.HashMap; +import java.util.Map; + +/** + * Tests for Python security permissions + */ +public class PythonSecurityTests extends ESTestCase { + + private PythonScriptEngineService se; + + @Before + public void setup() { + se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); + } + + @After + public void close() { + // We need to clear some system properties + System.clearProperty("python.cachedir.skip"); + System.clearProperty("python.console.encoding"); + se.close(); + } + + /** runs a script */ + private void doTest(String script) { + Map vars = new HashMap(); + se.execute(new CompiledScript(ScriptService.ScriptType.INLINE, "test", "python", se.compile(script)), vars); + } + + /** asserts that a script runs without exception */ + private void assertSuccess(String script) { + doTest(script); + } + + /** assert that a security exception is hit */ + private void assertFailure(String script) { + try { + doTest(script); + fail("did not get expected exception"); + } catch (PyException expected) { + Throwable cause = expected.getCause(); + assertNotNull("null cause for exception: " + expected, cause); + assertTrue("unexpected exception: " + cause, cause instanceof SecurityException); + } + } 
+ + /** Test some py scripts that are ok */ + public void testOK() { + assertSuccess("1 + 2"); + assertSuccess("from java.lang import Math\nMath.cos(0)"); + } + + /** Test some py scripts that should hit security exception */ + public void testNotOK() { + // sanity check :) + assertFailure("from java.lang import Runtime\nRuntime.getRuntime().halt(0)"); + // check a few things more restrictive than the ordinary policy + // no network + assertFailure("from java.net import Socket\nSocket(\"localhost\", 1024)"); + // no files + assertFailure("from java.io import File\nFile.createTempFile(\"test\", \"tmp\")"); + } +} From d3cef85352ac9db90c7b603d18e22d78e9f8875c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 4 Oct 2015 21:02:50 -0400 Subject: [PATCH 21/35] Remove unnecessary call to MessageDigest.reset --- .../elasticsearch/common/hash/MessageDigests.java | 13 +++++-------- .../common/hash/MessageDigestsTests.java | 2 +- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java b/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java index 7dc495f9c3b..7b3a108cc45 100644 --- a/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java +++ b/core/src/main/java/org/elasticsearch/common/hash/MessageDigests.java @@ -41,23 +41,20 @@ public class MessageDigests { } public static MessageDigest md5() { - return cloneAndReset(MD5_DIGEST); + return clone(MD5_DIGEST); } - public static MessageDigest sha1() { - return cloneAndReset(SHA_1_DIGEST); + return clone(SHA_1_DIGEST); } public static MessageDigest sha256() { - return cloneAndReset(SHA_256_DIGEST); + return clone(SHA_256_DIGEST); } - private static MessageDigest cloneAndReset(MessageDigest messageDigest) { + private static MessageDigest clone(MessageDigest messageDigest) { try { - MessageDigest clone = (MessageDigest) messageDigest.clone(); - clone.reset(); - return clone; + return (MessageDigest) messageDigest.clone(); } 
catch (CloneNotSupportedException e) { throw new ElasticsearchException("Unexpected exception cloning MessageDigest instance", e); } diff --git a/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java b/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java index f8d39fa6bf6..dbc174ba2d6 100644 --- a/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java +++ b/core/src/test/java/org/elasticsearch/common/hash/MessageDigestsTests.java @@ -70,7 +70,7 @@ public class MessageDigestsTests extends ESTestCase { BigInteger expected = BigInteger.probablePrime(256, random()); byte[] bytes = expected.toByteArray(); String hex = MessageDigests.toHexString(bytes); - String zeros = new String(new char[bytes.length * 2]).replace("\0", "0"); + String zeros = new String(new char[2 * bytes.length]).replace("\0", "0"); String expectedAsString = expected.toString(16); String expectedHex = zeros.substring(expectedAsString.length()) + expectedAsString; assertEquals(expectedHex, hex); From 668371c945b8ad4e5d263a9db3eb3d2bb2db8dd1 Mon Sep 17 00:00:00 2001 From: xuzha Date: Tue, 29 Sep 2015 12:17:51 -0700 Subject: [PATCH 22/35] Forbid index name with '.' and '..'. 
Fixes #13858 --- .../metadata/MetaDataCreateIndexService.java | 3 +++ .../elasticsearch/indexing/IndexActionIT.java | 20 ++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 272bf61e777..99ce095df55 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -203,6 +203,9 @@ public class MetaDataCreateIndexService extends AbstractComponent { if (state.metaData().hasAlias(index)) { throw new InvalidIndexNameException(new Index(index), index, "already exists as alias"); } + if (index.equals(".") || index.equals("..")) { + throw new InvalidIndexNameException(new Index(index), index, "must not be '.' or '..'"); + } } private void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener listener, final Semaphore mdLock) { diff --git a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index 970084146a5..1c914c10c03 100644 --- a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -203,7 +203,7 @@ public class IndexActionIT extends ESIntegTestCase { try { // Catch chars that are more than a single byte - client().prepareIndex(randomAsciiOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES -1).toLowerCase(Locale.ROOT) + + client().prepareIndex(randomAsciiOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES - 1).toLowerCase(Locale.ROOT) + "Ïž".toLowerCase(Locale.ROOT), "mytype").setSource("foo", "bar").get(); fail("exception should have been thrown on too-long index name"); @@ -215,4 +215,22 @@ public class IndexActionIT extends 
ESIntegTestCase { // we can create an index of max length createIndex(randomAsciiOfLength(MetaDataCreateIndexService.MAX_INDEX_NAME_BYTES).toLowerCase(Locale.ROOT)); } + + public void testInvalidIndexName() { + try { + createIndex("."); + fail("exception should have been thrown on dot index name"); + } catch (InvalidIndexNameException e) { + assertThat("exception contains message about index name is dot " + e.getMessage(), + e.getMessage().contains("Invalid index name [.], must not be \'.\' or '..'"), equalTo(true)); + } + + try { + createIndex(".."); + fail("exception should have been thrown on dot index name"); + } catch (InvalidIndexNameException e) { + assertThat("exception contains message about index name is dot " + e.getMessage(), + e.getMessage().contains("Invalid index name [..], must not be \'.\' or '..'"), equalTo(true)); + } + } } From ceefb06752073a3cc16fd981296870d6f4384bc6 Mon Sep 17 00:00:00 2001 From: Britta Weber Date: Mon, 5 Oct 2015 13:06:07 +0200 Subject: [PATCH 23/35] settings in log config file should not overwrite custom parameters --- .../common/logging/log4j/LogConfigurator.java | 8 +++-- .../log4j/LoggingConfigurationTests.java | 36 ++++++++++++++++--- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java index 21c4cdd530b..8f92864ea0f 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java @@ -90,12 +90,14 @@ public class LogConfigurator { loaded = true; // TODO: this is partly a copy of InternalSettingsPreparer...we should pass in Environment and not do all this... 
Environment environment = new Environment(settings); - Settings.Builder settingsBuilder = settingsBuilder().put(settings); + Settings.Builder settingsBuilder = settingsBuilder(); resolveConfig(environment, settingsBuilder); settingsBuilder .putProperties("elasticsearch.", System.getProperties()) - .putProperties("es.", System.getProperties()) - .replacePropertyPlaceholders(); + .putProperties("es.", System.getProperties()); + // add custom settings after config was added so that they are not overwritten by config + settingsBuilder.put(settings); + settingsBuilder.replacePropertyPlaceholders(); Properties props = new Properties(); for (Map.Entry entry : settingsBuilder.build().getAsMap().entrySet()) { String key = "log4j." + entry.getKey(); diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java index 6515432c1a7..199f94c3151 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java @@ -21,20 +21,21 @@ package org.elasticsearch.common.logging.log4j; import org.apache.log4j.Appender; import org.apache.log4j.Logger; +import org.elasticsearch.common.cli.CliToolTestCase; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.*; /** * @@ -148,7 +149,34 @@ public class 
LoggingConfigurationTests extends ESTestCase { LogConfigurator.resolveConfig(environment, builder); Settings logSettings = builder.build(); - assertThat(logSettings.get("yml"), Matchers.nullValue()); + assertThat(logSettings.get("yml"), nullValue()); + } + + // tests that custom settings are not overwritten by settings in the config file + @Test + public void testResolveOrder() throws Exception { + Path tmpDir = createTempDir(); + Path loggingConf = tmpDir.resolve(loggingConfiguration("yaml")); + Files.write(loggingConf, "logger.test: INFO, file\n".getBytes(StandardCharsets.UTF_8)); + Files.write(loggingConf, "appender.file.type: file\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND); + Environment environment = InternalSettingsPreparer.prepareEnvironment( + Settings.builder() + .put("path.conf", tmpDir.toAbsolutePath()) + .put("path.home", createTempDir().toString()) + .put("logger.test", "TRACE, console") + .put("appender.console.type", "console") + .put("appender.console.layout.type", "consolePattern") + .put("appender.console.layout.conversionPattern", "[%d{ISO8601}][%-5p][%-25c] %m%n") + .build(), new CliToolTestCase.MockTerminal()); + LogConfigurator.configure(environment.settings()); + // args should overwrite whatever is in the config + ESLogger esLogger = Log4jESLoggerFactory.getLogger("test"); + Logger logger = ((Log4jESLogger) esLogger).logger(); + Appender appender = logger.getAppender("console"); + assertThat(appender, notNullValue()); + assertTrue(logger.isTraceEnabled()); + appender = logger.getAppender("file"); + assertThat(appender, nullValue()); } private static String loggingConfiguration(String suffix) { From c0eca94a044eec90be859f28a6b0cd652c789714 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 30 Sep 2015 23:13:28 +0200 Subject: [PATCH 24/35] Remove shard-level injector Today we use a hirachical injector on the shard level for each shard created. 
This commit removes the shard level injetor and replaces it with good old constructor calls. This also removes all shard level plugin facilities such that plugins can only have node or index level modules. For plugins that need to track shard lifecycles they should use the relevant callback from the lifecycle we already provide. --- core/pom.xml | 2 +- .../org/elasticsearch/index/IndexModule.java | 14 +- .../org/elasticsearch/index/IndexService.java | 111 +++------------ .../index/IndexServicesProvider.java | 132 ++++++++++++++++++ .../elasticsearch/index/engine/Engine.java | 37 ++++- .../index/engine/EngineConfig.java | 14 +- .../index/engine/IndexSearcherWrapper.java | 2 +- .../engine/IndexSearcherWrappingService.java | 94 ------------- .../elasticsearch/index/shard/IndexShard.java | 42 +++--- .../index/shard/IndexShardModule.java | 16 +-- .../index/shard/ShadowIndexShard.java | 39 +----- .../elasticsearch/index/store/IndexStore.java | 6 +- .../elasticsearch/indices/IndicesService.java | 2 +- .../org/elasticsearch/plugins/Plugin.java | 7 - .../elasticsearch/plugins/PluginsService.java | 8 -- .../{shard => }/MockEngineFactoryPlugin.java | 7 +- .../index/engine/InternalEngineTests.java | 19 +-- .../index/engine/ShadowEngineTests.java | 2 +- .../indices/leaks/IndicesLeaksIT.java | 131 ----------------- .../test/InternalTestCluster.java | 2 +- .../test/store/MockFSIndexStore.java | 13 +- 21 files changed, 257 insertions(+), 443 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java delete mode 100644 core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrappingService.java rename core/src/test/java/org/elasticsearch/index/{shard => }/MockEngineFactoryPlugin.java (88%) delete mode 100644 core/src/test/java/org/elasticsearch/indices/leaks/IndicesLeaksIT.java diff --git a/core/pom.xml b/core/pom.xml index a96e3746bbd..2a5f6deabd1 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -274,7 +274,7 @@ 
org/elasticsearch/common/cli/CliToolTestCase$*.class org/elasticsearch/cluster/MockInternalClusterInfoService.class org/elasticsearch/cluster/MockInternalClusterInfoService$*.class - org/elasticsearch/index/shard/MockEngineFactoryPlugin.class + org/elasticsearch/index/MockEngineFactoryPlugin.class org/elasticsearch/search/MockSearchService.class org/elasticsearch/search/MockSearchService$*.class org/elasticsearch/search/aggregations/bucket/AbstractTermsTestCase.class diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index d94eb4f9c7d..59bec88d81a 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -20,21 +20,21 @@ package org.elasticsearch.index; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.engine.InternalEngineFactory; /** * */ public class IndexModule extends AbstractModule { - private final Settings settings; - - public IndexModule(Settings settings) { - this.settings = settings; - } - + // pkg private so tests can mock + Class engineFactoryImpl = InternalEngineFactory.class; + @Override protected void configure() { + bind(EngineFactory.class).to(engineFactoryImpl); bind(IndexService.class).asEagerSingleton(); + bind(IndexServicesProvider.class).asEagerSingleton(); } } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 3c40b02a4b9..f3c330a182b 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -24,16 +24,11 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.CreationException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.Injectors; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; @@ -49,15 +44,10 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.IndexShardModule; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardNotFoundException; -import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.shard.*; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; -import org.elasticsearch.index.store.StoreModule; import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesLifecycle; @@ -110,25 +100,8 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone private final NodeEnvironment nodeEnv; private final IndicesService indicesServices; - private volatile ImmutableMap shards = ImmutableMap.of(); + private volatile ImmutableMap shards = ImmutableMap.of(); - private static class IndexShardInjectorPair { - private final IndexShard 
indexShard; - private final Injector injector; - - public IndexShardInjectorPair(IndexShard indexShard, Injector injector) { - this.indexShard = indexShard; - this.injector = injector; - } - - public IndexShard getIndexShard() { - return indexShard; - } - - public Injector getInjector() { - return injector; - } - } private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false); @@ -173,7 +146,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public Iterator iterator() { - return shards.values().stream().map((p) -> p.getIndexShard()).iterator(); + return shards.values().iterator(); } public boolean hasShard(int shardId) { @@ -185,11 +158,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone */ @Nullable public IndexShard shard(int shardId) { - IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId); - if (indexShardInjectorPair != null) { - return indexShardInjectorPair.getIndexShard(); - } - return null; + return shards.get(shardId); } /** @@ -261,16 +230,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } } - /** - * Return the shard injector for the provided id, or throw an exception if there is no such shard. 
- */ - public Injector shardInjectorSafe(int shardId) { - IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId); - if (indexShardInjectorPair == null) { - throw new ShardNotFoundException(new ShardId(index, shardId)); - } - return indexShardInjectorPair.getInjector(); - } public String indexUUID() { return indexSettings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); @@ -304,7 +263,8 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone final ShardId shardId = new ShardId(index, sShardId); ShardLock lock = null; boolean success = false; - Injector shardInjector = null; + Store store = null; + IndexShard indexShard = null; try { lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5)); indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings); @@ -351,38 +311,18 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); - ModulesBuilder modules = new ModulesBuilder(); - // plugin modules must be added here, before others or we can get crazy injection errors... 
- for (Module pluginModule : pluginsService.shardModules(indexSettings)) { - modules.add(pluginModule); - } - modules.add(new IndexShardModule(shardId, primary, indexSettings)); - modules.add(new StoreModule(injector.getInstance(IndexStore.class).shardDirectory(), lock, - new StoreCloseListener(shardId, canDeleteShardContent, new Closeable() { - @Override - public void close() throws IOException { - injector.getInstance(IndicesQueryCache.class).onClose(shardId); - } - }), path)); - pluginsService.processModules(modules); - - try { - shardInjector = modules.createChildInjector(injector); - } catch (CreationException e) { - ElasticsearchException ex = new ElasticsearchException("failed to create shard", Injectors.getFirstErrorFailure(e)); - ex.setShard(shardId); - throw ex; - } catch (Throwable e) { - ElasticsearchException ex = new ElasticsearchException("failed to create shard", e); - ex.setShard(shardId); - throw ex; + IndexStore indexStore = injector.getInstance(IndexStore.class); + store = new Store(shardId, indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> injector.getInstance(IndicesQueryCache.class).onClose(shardId))); + if (primary && IndexMetaData.isIndexUsingShadowReplicas(indexSettings)) { + indexShard = new ShadowIndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); + } else { + indexShard = new IndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); } - IndexShard indexShard = shardInjector.getInstance(IndexShard.class); indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created"); indicesLifecycle.afterIndexShardCreated(indexShard); - shards = newMapBuilder(shards).put(shardId.id(), new IndexShardInjectorPair(indexShard, shardInjector)).immutableMap(); + shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); settingsService.addListener(indexShard); success = 
true; return indexShard; @@ -393,10 +333,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } finally { if (success == false) { IOUtils.closeWhileHandlingException(lock); - if (shardInjector != null) { - IndexShard indexShard = shardInjector.getInstance(IndexShard.class); - closeShardInjector("initialization failed", shardId, shardInjector, indexShard); - } + closeShard("initialization failed", shardId, indexShard, store); } } } @@ -409,29 +346,19 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone return; } logger.debug("[{}] closing... (reason: [{}])", shardId, reason); - HashMap tmpShardsMap = new HashMap<>(shards); - IndexShardInjectorPair indexShardInjectorPair = tmpShardsMap.remove(shardId); - indexShard = indexShardInjectorPair.getIndexShard(); - shardInjector = indexShardInjectorPair.getInjector(); + HashMap tmpShardsMap = new HashMap<>(shards); + indexShard = tmpShardsMap.remove(shardId); shards = ImmutableMap.copyOf(tmpShardsMap); - closeShardInjector(reason, sId, shardInjector, indexShard); + closeShard(reason, sId, indexShard, indexShard.store()); logger.debug("[{}] closed (reason: [{}])", shardId, reason); } - private void closeShardInjector(String reason, ShardId sId, Injector shardInjector, IndexShard indexShard) { + private void closeShard(String reason, ShardId sId, IndexShard indexShard, Store store) { final int shardId = sId.id(); try { try { indicesLifecycle.beforeIndexShardClosed(sId, indexShard, indexSettings); } finally { - // close everything else even if the beforeIndexShardClosed threw an exception - for (Class closeable : pluginsService.shardServices()) { - try { - shardInjector.getInstance(closeable).close(); - } catch (Throwable e) { - logger.debug("[{}] failed to clean plugin shard service [{}]", e, shardId, closeable); - } - } // this logic is tricky, we want to close the engine so we rollback the changes done to it // and close the shard so no operations are allowed 
to it if (indexShard != null) { @@ -449,7 +376,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } } finally { try { - shardInjector.getInstance(Store.class).close(); + store.close(); } catch (Throwable e) { logger.warn("[{}] failed to close store on shard removal (reason: [{}])", e, shardId, reason); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java new file mode 100644 index 00000000000..ad136c58dd6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java @@ -0,0 +1,132 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index; + +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.index.aliases.IndexAliasesService; +import org.elasticsearch.index.cache.IndexCache; +import org.elasticsearch.index.codec.CodecService; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; +import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.index.termvectors.TermVectorsService; +import org.elasticsearch.indices.IndicesLifecycle; +import org.elasticsearch.indices.IndicesWarmer; +import org.elasticsearch.indices.cache.query.IndicesQueryCache; +import org.elasticsearch.threadpool.ThreadPool; + +/** + * Simple provider class that holds the Index and Node level services used by + * a shard. + * This is just a temporary solution until we cleaned up index creation and removed injectors on that level as well. 
+ */ +public final class IndexServicesProvider { + + private final IndicesLifecycle indicesLifecycle; + private final ThreadPool threadPool; + private final MapperService mapperService; + private final IndexQueryParserService queryParserService; + private final IndexCache indexCache; + private final IndexAliasesService indexAliasesService; + private final IndicesQueryCache indicesQueryCache; + private final CodecService codecService; + private final TermVectorsService termVectorsService; + private final IndexFieldDataService indexFieldDataService; + private final IndicesWarmer warmer; + private final SimilarityService similarityService; + private final EngineFactory factory; + private final BigArrays bigArrays; + + @Inject + public IndexServicesProvider(IndicesLifecycle indicesLifecycle, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays) { + this.indicesLifecycle = indicesLifecycle; + this.threadPool = threadPool; + this.mapperService = mapperService; + this.queryParserService = queryParserService; + this.indexCache = indexCache; + this.indexAliasesService = indexAliasesService; + this.indicesQueryCache = indicesQueryCache; + this.codecService = codecService; + this.termVectorsService = termVectorsService; + this.indexFieldDataService = indexFieldDataService; + this.warmer = warmer; + this.similarityService = similarityService; + this.factory = factory; + this.bigArrays = bigArrays; + } + + public IndicesLifecycle getIndicesLifecycle() { + return indicesLifecycle; + } + + public ThreadPool getThreadPool() { + return threadPool; + } + + public MapperService getMapperService() { + return mapperService; + } + + 
public IndexQueryParserService getQueryParserService() { + return queryParserService; + } + + public IndexCache getIndexCache() { + return indexCache; + } + + public IndexAliasesService getIndexAliasesService() { + return indexAliasesService; + } + + public IndicesQueryCache getIndicesQueryCache() { + return indicesQueryCache; + } + + public CodecService getCodecService() { + return codecService; + } + + public TermVectorsService getTermVectorsService() { + return termVectorsService; + } + + public IndexFieldDataService getIndexFieldDataService() { + return indexFieldDataService; + } + + public IndicesWarmer getWarmer() { + return warmer; + } + + public SimilarityService getSimilarityService() { + return similarityService; + } + + public EngineFactory getFactory() { + return factory; + } + + public BigArrays getBigArrays() { + return bigArrays; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index f9331c4416a..dc35b95d2c1 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; @@ -78,6 +79,7 @@ public abstract class Engine implements Closeable { protected final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock(); protected final ReleasableLock readLock = new ReleasableLock(rwl.readLock()); protected final ReleasableLock writeLock = new ReleasableLock(rwl.writeLock()); + private final IndexSearcherWrapper searcherWrapper; protected volatile Throwable failedEngine = null; @@ -92,6 +94,7 @@ public abstract 
class Engine implements Closeable { engineConfig.getIndexSettings(), engineConfig.getShardId()); this.failedEngineListener = engineConfig.getFailedEngineListener(); this.deletionPolicy = engineConfig.getDeletionPolicy(); + this.searcherWrapper = engineConfig.getSearcherWrapper(); } /** Returns 0 in the case where accountable is null, otherwise returns {@code ramBytesUsed()} */ @@ -279,7 +282,7 @@ public abstract class Engine implements Closeable { try { final Searcher retVal = newSearcher(source, searcher, manager); success = true; - return config().getWrappingService().wrap(engineConfig, retVal); + return wrap(engineConfig, retVal); } finally { if (!success) { manager.release(searcher); @@ -298,6 +301,38 @@ public abstract class Engine implements Closeable { } } + /** + * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher + * gets wrapped and a new {@link Searcher} instances is returned, otherwise the provided {@link Searcher} is returned. + * + * This is invoked each time a {@link Searcher} is requested to do an operation. 
(for example search) + */ + private Searcher wrap(EngineConfig engineConfig, final Searcher engineSearcher) throws EngineException { + if (searcherWrapper == null) { + return engineSearcher; + } + + DirectoryReader reader = searcherWrapper.wrap((DirectoryReader) engineSearcher.reader()); + IndexSearcher innerIndexSearcher = new IndexSearcher(reader); + innerIndexSearcher.setQueryCache(engineConfig.getQueryCache()); + innerIndexSearcher.setQueryCachingPolicy(engineConfig.getQueryCachingPolicy()); + innerIndexSearcher.setSimilarity(engineConfig.getSimilarity()); + // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point + // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten + // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times + IndexSearcher indexSearcher = searcherWrapper.wrap(engineConfig, innerIndexSearcher); + if (reader == engineSearcher.reader() && indexSearcher == innerIndexSearcher) { + return engineSearcher; + } else { + return new Engine.Searcher(engineSearcher.source(), indexSearcher) { + @Override + public void close() throws ElasticsearchException { + engineSearcher.close(); + } + }; + } + } + /** returns the translog for this engine */ public abstract Translog getTranslog(); diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index c6e67243514..7d57bb5b0b3 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; +import 
org.apache.lucene.util.SetOnce; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -73,7 +74,7 @@ public final class EngineConfig { private final boolean forceNewTranslog; private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; - private final IndexSearcherWrappingService wrappingService; + private final SetOnce searcherWrapper = new SetOnce<>(); /** * Index setting for compound file on flush. This setting is realtime updateable. @@ -121,7 +122,7 @@ public final class EngineConfig { Settings indexSettings, IndicesWarmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy, MergePolicy mergePolicy, MergeSchedulerConfig mergeSchedulerConfig, Analyzer analyzer, Similarity similarity, CodecService codecService, Engine.FailedEngineListener failedEngineListener, - TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, IndexSearcherWrappingService wrappingService, TranslogConfig translogConfig) { + TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig) { this.shardId = shardId; this.indexSettings = indexSettings; this.threadPool = threadPool; @@ -135,7 +136,6 @@ public final class EngineConfig { this.similarity = similarity; this.codecService = codecService; this.failedEngineListener = failedEngineListener; - this.wrappingService = wrappingService; this.compoundOnFlush = indexSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush); codecName = indexSettings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME); indexingBufferSize = DEFAULT_INDEX_BUFFER_SIZE; @@ -380,8 +380,12 @@ public final class EngineConfig { return queryCachingPolicy; } - public IndexSearcherWrappingService getWrappingService() { - return wrappingService; + IndexSearcherWrapper 
getSearcherWrapper() { + return searcherWrapper.get(); + } + + public void setSearcherWrapper(IndexSearcherWrapper searcherWrapper) { + this.searcherWrapper.set(searcherWrapper); } /** diff --git a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java b/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java index 665d17a2f86..8a407f00eae 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java +++ b/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.IndexSearcher; * Extension point to add custom functionality at request time to the {@link DirectoryReader} * and {@link IndexSearcher} managed by the {@link Engine}. */ -public interface IndexSearcherWrapper { +interface IndexSearcherWrapper { /** * @param reader The provided directory reader to be wrapped to add custom functionality diff --git a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrappingService.java b/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrappingService.java deleted file mode 100644 index 23d05f01dc7..00000000000 --- a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrappingService.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.engine; - -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.index.engine.Engine.Searcher; - -import java.util.Set; - -/** - * Service responsible for wrapping the {@link DirectoryReader} and {@link IndexSearcher} of a {@link Searcher} via the - * configured {@link IndexSearcherWrapper} instance. This allows custom functionally to be added the {@link Searcher} - * before being used to do an operation (search, get, field stats etc.) - */ -// TODO: This needs extension point is a bit hacky now, because the IndexSearch from the engine can only be wrapped once, -// if we allowed the IndexSearcher to be wrapped multiple times then a custom IndexSearcherWrapper needs have good -// control over its location in the wrapping chain -public final class IndexSearcherWrappingService { - - private final IndexSearcherWrapper wrapper; - - // for unit tests: - IndexSearcherWrappingService() { - this.wrapper = null; - } - - @Inject - // Use a Set parameter here, because constructor parameter can't be optional - // and I prefer to keep the `wrapper` field final. - public IndexSearcherWrappingService(Set wrappers) { - if (wrappers.size() > 1) { - throw new IllegalStateException("wrapping of the index searcher by more than one wrappers is forbidden, found the following wrappers [" + wrappers + "]"); - } - if (wrappers.isEmpty()) { - this.wrapper = null; - } else { - this.wrapper = wrappers.iterator().next(); - } - } - - /** - * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher - * gets wrapped and a new {@link Searcher} instances is returned, otherwise the provided {@link Searcher} is returned. 
- * - * This is invoked each time a {@link Searcher} is requested to do an operation. (for example search) - */ - public Searcher wrap(EngineConfig engineConfig, final Searcher engineSearcher) throws EngineException { - if (wrapper == null) { - return engineSearcher; - } - - DirectoryReader reader = wrapper.wrap((DirectoryReader) engineSearcher.reader()); - IndexSearcher innerIndexSearcher = new IndexSearcher(reader); - innerIndexSearcher.setQueryCache(engineConfig.getQueryCache()); - innerIndexSearcher.setQueryCachingPolicy(engineConfig.getQueryCachingPolicy()); - innerIndexSearcher.setSimilarity(engineConfig.getSimilarity()); - // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point - // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten - // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times - IndexSearcher indexSearcher = wrapper.wrap(engineConfig, innerIndexSearcher); - if (reader == engineSearcher.reader() && indexSearcher == innerIndexSearcher) { - return engineSearcher; - } else { - return new Engine.Searcher(engineSearcher.source(), indexSearcher) { - - @Override - public void close() throws ElasticsearchException { - engineSearcher.close(); - } - }; - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index c98a9c0f9dd..2137a42c658 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -51,11 +51,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.gateway.MetaDataStateFormat; +import org.elasticsearch.index.IndexServicesProvider; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.cache.IndexCache; @@ -99,7 +99,6 @@ import org.elasticsearch.index.translog.TranslogStats; import org.elasticsearch.index.translog.TranslogWriter; import org.elasticsearch.index.warmer.ShardIndexWarmerService; import org.elasticsearch.index.warmer.WarmerStats; -import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.InternalIndicesLifecycle; import org.elasticsearch.indices.cache.query.IndicesQueryCache; @@ -161,7 +160,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett protected volatile IndexShardState state; protected final AtomicReference currentEngineReference = new AtomicReference<>(); protected final EngineFactory engineFactory; - private final IndexSearcherWrappingService wrappingService; @Nullable private RecoveryState recoveryState; @@ -193,39 +191,33 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett private EnumSet readAllowedStates = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY); @Inject - public IndexShard(ShardId shardId, @IndexSettings Settings indexSettings, IndicesLifecycle indicesLifecycle, Store store, - ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, - IndicesQueryCache indicesQueryCache, CodecService codecService, - TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, - @Nullable 
IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, - ShardPath path, BigArrays bigArrays, IndexSearcherWrappingService wrappingService) { + public IndexShard(ShardId shardId, @IndexSettings Settings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) { super(shardId, indexSettings); - this.codecService = codecService; - this.warmer = warmer; + this.codecService = provider.getCodecService(); + this.warmer = provider.getWarmer(); this.deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); - this.similarityService = similarityService; - this.wrappingService = wrappingService; + this.similarityService = provider.getSimilarityService(); Objects.requireNonNull(store, "Store must be provided to the index shard"); - this.engineFactory = factory; - this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle; + this.engineFactory = provider.getFactory(); + this.indicesLifecycle = (InternalIndicesLifecycle) provider.getIndicesLifecycle(); this.store = store; this.mergeSchedulerConfig = new MergeSchedulerConfig(indexSettings); - this.threadPool = threadPool; - this.mapperService = mapperService; - this.queryParserService = queryParserService; - this.indexCache = indexCache; - this.indexAliasesService = indexAliasesService; + this.threadPool = provider.getThreadPool(); + this.mapperService = provider.getMapperService(); + this.queryParserService = provider.getQueryParserService(); + this.indexCache = provider.getIndexCache(); + this.indexAliasesService = provider.getIndexAliasesService(); this.indexingService = new ShardIndexingService(shardId, indexSettings); this.getService = new ShardGetService(this, mapperService); - this.termVectorsService = termVectorsService; + this.termVectorsService = provider.getTermVectorsService(); this.searchService = new ShardSearchStats(indexSettings); this.shardWarmerService = new ShardIndexWarmerService(shardId, indexSettings); - this.indicesQueryCache 
= indicesQueryCache; + this.indicesQueryCache = provider.getIndicesQueryCache(); this.shardQueryCache = new ShardRequestCache(shardId, indexSettings); this.shardFieldData = new ShardFieldData(); this.shardPercolateService = new ShardPercolateService(shardId, indexSettings); + this.indexFieldDataService = provider.getIndexFieldDataService(); this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryParserService, indexingService, indicesLifecycle, mapperService, indexFieldDataService, shardPercolateService); - this.indexFieldDataService = indexFieldDataService; this.shardBitsetFilterCache = new ShardBitsetFilterCache(shardId, indexSettings); state = IndexShardState.CREATED; this.refreshInterval = indexSettings.getAsTime(INDEX_REFRESH_INTERVAL, EngineConfig.DEFAULT_REFRESH_INTERVAL); @@ -238,7 +230,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett this.checkIndexOnStartup = indexSettings.get("index.shard.check_on_startup", "false"); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, getFromSettings(logger, indexSettings, Translog.Durabilty.REQUEST), - bigArrays, threadPool); + provider.getBigArrays(), threadPool); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis @@ -1403,7 +1395,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett }; return new EngineConfig(shardId, threadPool, indexingService, indexSettings, warmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig, - mapperService.indexAnalyzer(), similarityService.similarity(), codecService, failedEngineListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, wrappingService, translogConfig); + mapperService.indexAnalyzer(), similarityService.similarity(), codecService, failedEngineListener, 
translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig); } private static class IndexShardOperationCounter extends AbstractRefCounted { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java index 188669f3fb2..2d97eea08d6 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java @@ -21,12 +21,7 @@ package org.elasticsearch.index.shard; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.engine.IndexSearcherWrapper; -import org.elasticsearch.index.engine.IndexSearcherWrappingService; -import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.index.engine.InternalEngineFactory; /** * The {@code IndexShardModule} module is responsible for binding the correct @@ -39,8 +34,7 @@ public class IndexShardModule extends AbstractModule { private final Settings settings; private final boolean primary; - // pkg private so tests can mock - Class engineFactoryImpl = InternalEngineFactory.class; + public IndexShardModule(ShardId shardId, boolean primary, Settings settings) { this.settings = settings; @@ -64,13 +58,5 @@ public class IndexShardModule extends AbstractModule { } else { bind(IndexShard.class).asEagerSingleton(); } - - bind(EngineFactory.class).to(engineFactoryImpl); - bind(IndexSearcherWrappingService.class).asEagerSingleton(); - // this injects an empty set in IndexSearcherWrappingService, otherwise guice can't construct IndexSearcherWrappingService - Multibinder multibinder - = Multibinder.newSetBinder(binder(), IndexSearcherWrapper.class); } - - } \ No newline at end of file diff --git 
a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java index 62fa928faf1..c81b9e5c541 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java @@ -18,32 +18,14 @@ */ package org.elasticsearch.index.shard; -import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.aliases.IndexAliasesService; -import org.elasticsearch.index.cache.IndexCache; -import org.elasticsearch.index.codec.CodecService; -import org.elasticsearch.index.engine.IndexSearcherWrappingService; +import org.elasticsearch.index.IndexServicesProvider; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; -import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.settings.IndexSettings; -import org.elasticsearch.index.settings.IndexSettingsService; -import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.Store; -import org.elasticsearch.index.termvectors.TermVectorsService; -import org.elasticsearch.indices.IndicesLifecycle; -import org.elasticsearch.indices.IndicesWarmer; -import org.elasticsearch.indices.cache.query.IndicesQueryCache; -import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -55,23 +37,8 @@ import java.io.IOException; */ public final class ShadowIndexShard extends 
IndexShard { - @Inject - public ShadowIndexShard(ShardId shardId, @IndexSettings Settings indexSettings, - IndicesLifecycle indicesLifecycle, Store store, - ThreadPool threadPool, MapperService mapperService, - IndexQueryParserService queryParserService, IndexCache indexCache, - IndexAliasesService indexAliasesService, IndicesQueryCache indicesQueryCache, - CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, - @Nullable IndicesWarmer warmer, - SimilarityService similarityService, - EngineFactory factory, - ShardPath path, BigArrays bigArrays, IndexSearcherWrappingService wrappingService) throws IOException { - super(shardId, indexSettings, indicesLifecycle, store, - threadPool, mapperService, queryParserService, indexCache, indexAliasesService, - indicesQueryCache, codecService, - termVectorsService, indexFieldDataService, - warmer, similarityService, - factory, path, bigArrays, wrappingService); + public ShadowIndexShard(ShardId shardId, @IndexSettings Settings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) throws IOException { + super(shardId, indexSettings, path, store, provider); } /** diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index 4022dd75aa1..350eb7a0d81 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.indices.store.IndicesStore; import java.io.Closeable; @@ -112,7 +113,8 @@ public class IndexStore extends AbstractIndexComponent implements Closeable { /** * The shard store 
class that should be used for each shard. */ - public Class shardDirectory() { - return FsDirectoryService.class; + public DirectoryService newDirectoryService(ShardPath path) { + return new FsDirectoryService(indexSettings, this, path); } + } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 3b24544267d..e2448670f83 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -346,7 +346,7 @@ public class IndicesService extends AbstractLifecycleComponent i modules.add(new IndexFieldDataModule(indexSettings)); modules.add(new MapperServiceModule()); modules.add(new IndexAliasesServiceModule()); - modules.add(new IndexModule(indexSettings)); + modules.add(new IndexModule()); pluginsService.processModules(modules); diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 72077954ea8..04f789e5c34 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -73,13 +73,6 @@ public abstract class Plugin { return Collections.emptyList(); } - /** - * Per index shard module. - */ - public Collection shardModules(Settings indexSettings) { - return Collections.emptyList(); - } - /** * Per index shard service that will be automatically closed. 
*/ diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 5834efc398d..cd71fdbc785 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -250,14 +250,6 @@ public class PluginsService extends AbstractComponent { return services; } - public Collection shardModules(Settings indexSettings) { - List modules = new ArrayList<>(); - for (Tuple plugin : plugins) { - modules.addAll(plugin.v2().shardModules(indexSettings)); - } - return modules; - } - public Collection> shardServices() { List> services = new ArrayList<>(); for (Tuple plugin : plugins) { diff --git a/core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java b/core/src/test/java/org/elasticsearch/index/MockEngineFactoryPlugin.java similarity index 88% rename from core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java rename to core/src/test/java/org/elasticsearch/index/MockEngineFactoryPlugin.java index d1b50487c63..94ddde0e3fb 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/MockEngineFactoryPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/MockEngineFactoryPlugin.java @@ -16,10 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.index.shard; +package org.elasticsearch.index; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.engine.MockEngineFactory; import org.elasticsearch.test.engine.MockEngineSupportModule; @@ -27,7 +28,7 @@ import org.elasticsearch.test.engine.MockEngineSupportModule; import java.util.Collection; import java.util.Collections; -// this must exist in the same package as IndexShardModule to allow access to setting the impl +// this must exist in the same package as IndexModule to allow access to setting the impl public class MockEngineFactoryPlugin extends Plugin { @Override public String name() { @@ -41,7 +42,7 @@ public class MockEngineFactoryPlugin extends Plugin { public Collection indexModules(Settings indexSettings) { return Collections.singletonList(new MockEngineSupportModule()); } - public void onModule(IndexShardModule module) { + public void onModule(IndexModule module) { module.engineFactoryImpl = MockEngineFactory.class; } } diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 01197fbfc5b..62964244344 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -232,15 +232,15 @@ public class InternalEngineTests extends ESTestCase { return new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); } - protected InternalEngine createEngine(Store store, Path translogPath, IndexSearcherWrapper... 
wrappers) { - return createEngine(defaultSettings, store, translogPath, new MergeSchedulerConfig(defaultSettings), newMergePolicy(), wrappers); + protected InternalEngine createEngine(Store store, Path translogPath) { + return createEngine(defaultSettings, store, translogPath, new MergeSchedulerConfig(defaultSettings), newMergePolicy()); } - protected InternalEngine createEngine(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy, IndexSearcherWrapper... wrappers) { - return new InternalEngine(config(indexSettings, store, translogPath, mergeSchedulerConfig, mergePolicy, wrappers), false); + protected InternalEngine createEngine(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) { + return new InternalEngine(config(indexSettings, store, translogPath, mergeSchedulerConfig, mergePolicy), false); } - public EngineConfig config(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy, IndexSearcherWrapper... 
wrappers) { + public EngineConfig config(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) { IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool); @@ -251,7 +251,7 @@ public class InternalEngineTests extends ESTestCase { public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) { // we don't need to notify anybody in this test } - }, new TranslogHandler(shardId.index().getName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), new IndexSearcherWrappingService(new HashSet<>(Arrays.asList(wrappers))), translogConfig); + }, new TranslogHandler(shardId.index().getName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig); try { config.setCreate(Lucene.indexExists(store.directory()) == false); } catch (IOException e) { @@ -514,7 +514,10 @@ public class InternalEngineTests extends ESTestCase { }; Store store = createStore(); Path translog = createTempDir("translog-test"); - InternalEngine engine = createEngine(store, translog, wrapper); + InternalEngine engine = createEngine(store, translog); + engine.close(); + engine.config().setSearcherWrapper(wrapper); + engine = new InternalEngine(engine.config(), false); Engine.Searcher searcher = engine.acquireSearcher("test"); assertThat(counter.get(), equalTo(2)); searcher.close(); @@ -1951,7 +1954,7 @@ public class InternalEngineTests extends ESTestCase { EngineConfig brokenConfig = new EngineConfig(shardId, threadPool, config.getIndexingService(), config.getIndexSettings() , null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getMergeSchedulerConfig(), config.getAnalyzer(), config.getSimilarity(), new CodecService(shardId.index()), 
config.getFailedEngineListener() - , config.getTranslogRecoveryPerformer(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), new IndexSearcherWrappingService(), translogConfig); + , config.getTranslogRecoveryPerformer(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig); try { new InternalEngine(brokenConfig, false); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index a6ca90a73db..b5987a92623 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -216,7 +216,7 @@ public class ShadowEngineTests extends ESTestCase { @Override public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) { // we don't need to notify anybody in this test - }}, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), new IndexSearcherWrappingService(), translogConfig); + }}, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig); try { config.setCreate(Lucene.indexExists(store.directory()) == false); } catch (IOException e) { diff --git a/core/src/test/java/org/elasticsearch/indices/leaks/IndicesLeaksIT.java b/core/src/test/java/org/elasticsearch/indices/leaks/IndicesLeaksIT.java deleted file mode 100644 index 422fee6879f..00000000000 --- a/core/src/test/java/org/elasticsearch/indices/leaks/IndicesLeaksIT.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.indices.leaks; - -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.junit.Test; - -import java.lang.ref.WeakReference; -import java.util.ArrayList; -import java.util.List; - -import static org.elasticsearch.test.ESIntegTestCase.Scope; -import static org.hamcrest.Matchers.nullValue; - -/** - */ -@ClusterScope(scope= Scope.TEST, numDataNodes =1) -public class IndicesLeaksIT extends ESIntegTestCase { - - - @SuppressWarnings({"ConstantConditions", "unchecked"}) - @Test - @BadApple(bugUrl = "https://github.com/elasticsearch/elasticsearch/issues/3232") - public void testIndexShardLifecycleLeak() throws Exception { - - client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .execute().actionGet(); - - client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - - IndicesService indicesService = 
internalCluster().getDataNodeInstance(IndicesService.class); - IndexService indexService = indicesService.indexServiceSafe("test"); - Injector indexInjector = indexService.injector(); - IndexShard shard = indexService.shardSafe(0); - Injector shardInjector = indexService.shardInjectorSafe(0); - - performCommonOperations(); - - List indexReferences = new ArrayList<>(); - List shardReferences = new ArrayList<>(); - - // TODO if we could iterate over the already created classes on the injector, we can just add them here to the list - // for now, we simple add some classes that make sense - - // add index references - indexReferences.add(new WeakReference(indexService)); - indexReferences.add(new WeakReference(indexInjector)); - indexReferences.add(new WeakReference(indexService.mapperService())); - for (DocumentMapper documentMapper : indexService.mapperService().docMappers(true)) { - indexReferences.add(new WeakReference(documentMapper)); - } - indexReferences.add(new WeakReference(indexService.aliasesService())); - indexReferences.add(new WeakReference(indexService.analysisService())); - indexReferences.add(new WeakReference(indexService.fieldData())); - indexReferences.add(new WeakReference(indexService.queryParserService())); - - - // add shard references - shardReferences.add(new WeakReference(shard)); - shardReferences.add(new WeakReference(shardInjector)); - - indexService = null; - indexInjector = null; - shard = null; - shardInjector = null; - - cluster().wipeIndices("test"); - - for (int i = 0; i < 100; i++) { - System.gc(); - int indexNotCleared = 0; - for (WeakReference indexReference : indexReferences) { - if (indexReference.get() != null) { - indexNotCleared++; - } - } - int shardNotCleared = 0; - for (WeakReference shardReference : shardReferences) { - if (shardReference.get() != null) { - shardNotCleared++; - } - } - logger.info("round {}, indices {}/{}, shards {}/{}", i, indexNotCleared, indexReferences.size(), shardNotCleared, 
shardReferences.size()); - if (indexNotCleared == 0 && shardNotCleared == 0) { - break; - } - } - - //System.out.println("sleeping");Thread.sleep(1000000); - - for (WeakReference indexReference : indexReferences) { - assertThat("dangling index reference: " + indexReference.get(), indexReference.get(), nullValue()); - } - - for (WeakReference shardReference : shardReferences) { - assertThat("dangling shard reference: " + shardReference.get(), shardReference.get(), nullValue()); - } - } - - private void performCommonOperations() { - client().prepareIndex("test", "type", "1").setSource("field1", "value", "field2", 2, "field3", 3.0f).execute().actionGet(); - client().admin().indices().prepareRefresh().execute().actionGet(); - client().prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("field1:value")).execute().actionGet(); - client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field1", "value")).execute().actionGet(); - } -} diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 53733bca3e3..4d107d1151d 100644 --- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -68,7 +68,7 @@ import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineClosedException; import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.MockEngineFactoryPlugin; +import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; diff --git a/core/src/test/java/org/elasticsearch/test/store/MockFSIndexStore.java b/core/src/test/java/org/elasticsearch/test/store/MockFSIndexStore.java index c5b5ac36f00..11a791c04f3 100644 --- 
a/core/src/test/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/core/src/test/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -24,14 +24,19 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.DirectoryService; +import org.elasticsearch.index.store.FsDirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreModule; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.plugins.Plugin; public class MockFSIndexStore extends IndexStore { + private final IndicesService indicesService; + public static class TestPlugin extends Plugin { @Override public String name() { @@ -52,13 +57,13 @@ public class MockFSIndexStore extends IndexStore { @Inject public MockFSIndexStore(Index index, @IndexSettings Settings indexSettings, IndexSettingsService indexSettingsService, - IndicesStore indicesStore) { + IndicesStore indicesStore, IndicesService indicesService) { super(index, indexSettings, indexSettingsService, indicesStore); + this.indicesService = indicesService; } - @Override - public Class shardDirectory() { - return MockFSDirectoryService.class; + public DirectoryService newDirectoryService(ShardPath path) { + return new MockFSDirectoryService(indexSettings, this, indicesService, path); } } From d2e3e8cc7bc41bf5f8ef1f40f8fb693d2e3927f7 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 1 Oct 2015 10:32:14 +0200 Subject: [PATCH 25/35] more cleanups --- .../org/elasticsearch/index/IndexService.java | 9 ++- .../index/shard/IndexShardModule.java | 62 ------------------- .../elasticsearch/index/store/IndexStore.java | 1 - .../org/elasticsearch/plugins/Plugin.java | 7 --- 
.../elasticsearch/plugins/PluginsService.java | 8 --- ...oduleTests.java => IndexServiceTests.java} | 21 +++---- .../plugin/example/JvmExamplePlugin.java | 18 +----- .../store/smbmmapfs/SmbMmapFsIndexStore.java | 5 +- .../smbsimplefs/SmbSimpleFsIndexStore.java | 7 ++- 9 files changed, 27 insertions(+), 111 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java rename core/src/test/java/org/elasticsearch/index/{shard/IndexShardModuleTests.java => IndexServiceTests.java} (67%) diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index f3c330a182b..b48b2aedeae 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -260,6 +260,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone if (closed.get()) { throw new IllegalStateException("Can't create shard [" + index.name() + "][" + sShardId + "], closed"); } + if (indexSettings.get("index.translog.type") != null) { // TODO remove? + throw new IllegalStateException("a custom translog type is no longer supported. 
got [" + indexSettings.get("index.translog.type") + "]"); + } final ShardId shardId = new ShardId(index, sShardId); ShardLock lock = null; boolean success = false; @@ -313,7 +316,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); IndexStore indexStore = injector.getInstance(IndexStore.class); store = new Store(shardId, indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> injector.getInstance(IndicesQueryCache.class).onClose(shardId))); - if (primary && IndexMetaData.isIndexUsingShadowReplicas(indexSettings)) { + if (useShadowEngine(primary, indexSettings)) { indexShard = new ShadowIndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); } else { indexShard = new IndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); @@ -338,6 +341,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } } + static boolean useShadowEngine(boolean primary, Settings indexSettings) { + return primary == false && IndexMetaData.isIndexUsingShadowReplicas(indexSettings); + } + public synchronized void removeShard(int shardId, String reason) { final ShardId sId = new ShardId(index, shardId); final Injector shardInjector; diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java deleted file mode 100644 index 2d97eea08d6..00000000000 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardModule.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.shard; - -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.settings.Settings; - -/** - * The {@code IndexShardModule} module is responsible for binding the correct - * shard id, index shard, engine factory, and warming service for a newly - * created shard. - */ -public class IndexShardModule extends AbstractModule { - - private final ShardId shardId; - private final Settings settings; - private final boolean primary; - - - - public IndexShardModule(ShardId shardId, boolean primary, Settings settings) { - this.settings = settings; - this.shardId = shardId; - this.primary = primary; - if (settings.get("index.translog.type") != null) { - throw new IllegalStateException("a custom translog type is no longer supported. 
got [" + settings.get("index.translog.type") + "]"); - } - } - - /** Return true if a shadow engine should be used */ - protected boolean useShadowEngine() { - return primary == false && IndexMetaData.isIndexUsingShadowReplicas(settings); - } - - @Override - protected void configure() { - bind(ShardId.class).toInstance(shardId); - if (useShadowEngine()) { - bind(IndexShard.class).to(ShadowIndexShard.class).asEagerSingleton(); - } else { - bind(IndexShard.class).asEagerSingleton(); - } - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index 350eb7a0d81..3a23a09a652 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -116,5 +116,4 @@ public class IndexStore extends AbstractIndexComponent implements Closeable { public DirectoryService newDirectoryService(ShardPath path) { return new FsDirectoryService(indexSettings, this, path); } - } diff --git a/core/src/main/java/org/elasticsearch/plugins/Plugin.java b/core/src/main/java/org/elasticsearch/plugins/Plugin.java index 04f789e5c34..4229c54401a 100644 --- a/core/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -73,13 +73,6 @@ public abstract class Plugin { return Collections.emptyList(); } - /** - * Per index shard service that will be automatically closed. - */ - public Collection> shardServices() { - return Collections.emptyList(); - } - /** * Additional node settings loaded by the plugin. Note that settings that are explicit in the nodes settings can't be * overwritten with the additional settings. These settings added if they don't exist. 
diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index cd71fdbc785..9582d3f1714 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -250,14 +250,6 @@ public class PluginsService extends AbstractComponent { return services; } - public Collection> shardServices() { - List> services = new ArrayList<>(); - for (Tuple plugin : plugins) { - services.addAll(plugin.v2().shardServices()); - } - return services; - } - /** * Get information about plugins (jvm and site plugins). */ diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java similarity index 67% rename from core/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java rename to core/src/test/java/org/elasticsearch/index/IndexServiceTests.java index e488905710a..7d66382440a 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -17,19 +17,19 @@ * under the License. 
*/ -package org.elasticsearch.index.shard; +package org.elasticsearch.index; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.junit.Test; -/** Unit test(s) for IndexShardModule */ -public class IndexShardModuleTests extends ESTestCase { +/** Unit test(s) for IndexService */ +public class IndexServiceTests extends ESTestCase { @Test public void testDetermineShadowEngineShouldBeUsed() { - ShardId shardId = new ShardId("myindex", 0); Settings regularSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 2) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) @@ -41,14 +41,9 @@ public class IndexShardModuleTests extends ESTestCase { .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .build(); - IndexShardModule ism1 = new IndexShardModule(shardId, true, regularSettings); - IndexShardModule ism2 = new IndexShardModule(shardId, false, regularSettings); - IndexShardModule ism3 = new IndexShardModule(shardId, true, shadowSettings); - IndexShardModule ism4 = new IndexShardModule(shardId, false, shadowSettings); - - assertFalse("no shadow replicas for normal settings", ism1.useShadowEngine()); - assertFalse("no shadow replicas for normal settings", ism2.useShadowEngine()); - assertFalse("no shadow replicas for primary shard with shadow settings", ism3.useShadowEngine()); - assertTrue("shadow replicas for replica shards with shadow settings", ism4.useShadowEngine()); + assertFalse("no shadow replicas for normal settings", IndexService.useShadowEngine(true, regularSettings)); + assertFalse("no shadow replicas for normal settings", IndexService.useShadowEngine(false, regularSettings)); + assertFalse("no shadow replicas for primary shard with shadow settings", IndexService.useShadowEngine(true, shadowSettings)); + assertTrue("shadow replicas for replica shards with shadow 
settings",IndexService.useShadowEngine(false, shadowSettings)); } } diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java index fc9de8912d6..9c4ec733a9f 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java @@ -66,24 +66,10 @@ public class JvmExamplePlugin extends Plugin { } @Override - public Collection indexModules(Settings indexSettings) { - return Collections.emptyList(); - } + public Collection indexModules(Settings indexSettings) { return Collections.emptyList();} @Override - public Collection> indexServices() { - return Collections.emptyList(); - } - - @Override - public Collection shardModules(Settings indexSettings) { - return Collections.emptyList(); - } - - @Override - public Collection> shardServices() { - return Collections.emptyList(); - } + public Collection> indexServices() { return Collections.emptyList();} @Override public Settings additionalSettings() { diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsIndexStore.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsIndexStore.java index 04229756e1b..1d1592dcf32 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsIndexStore.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsIndexStore.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.IndexStore; 
import org.elasticsearch.indices.store.IndicesStore; @@ -37,7 +38,7 @@ public class SmbMmapFsIndexStore extends IndexStore { } @Override - public Class shardDirectory() { - return SmbMmapFsDirectoryService.class; + public DirectoryService newDirectoryService(ShardPath path) { + return new SmbMmapFsDirectoryService(indexSettings(), this, path); } } diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbsimplefs/SmbSimpleFsIndexStore.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbsimplefs/SmbSimpleFsIndexStore.java index 48133440f6d..67d396a80a5 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbsimplefs/SmbSimpleFsIndexStore.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbsimplefs/SmbSimpleFsIndexStore.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsService; +import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.indices.store.IndicesStore; @@ -36,9 +37,13 @@ public class SmbSimpleFsIndexStore extends IndexStore { super(index, indexSettings, indexSettingsService, indicesStore); } - @Override public Class shardDirectory() { return SmbSimpleFsDirectoryService.class; } + + @Override + public DirectoryService newDirectoryService(ShardPath path) { + return new SmbSimpleFsDirectoryService(indexSettings(), this, path); + } } From a892a35f403de0eecdcf694d03c0c2e1a817d6ad Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 1 Oct 2015 20:00:10 +0200 Subject: [PATCH 26/35] Hide engine entirely in IndexShard and do searcher wrapping only on top of the engine --- .../TransportIndicesSegmentsAction.java | 2 +- .../get/TransportUpgradeStatusAction.java | 2 +- 
.../org/elasticsearch/index/IndexModule.java | 12 ++- .../org/elasticsearch/index/IndexService.java | 17 ---- .../index/IndexServicesProvider.java | 11 ++- .../elasticsearch/index/engine/Engine.java | 37 +-------- .../index/engine/EngineConfig.java | 9 +-- .../index/engine/IndexSearcherWrapper.java | 47 ----------- .../percolator/PercolatorQueriesRegistry.java | 2 +- .../index/shard/IndexSearcherWrapper.java | 79 +++++++++++++++++++ .../elasticsearch/index/shard/IndexShard.java | 35 ++++++-- .../memory/IndexingMemoryController.java | 2 +- .../recovery/RecoverySourceHandler.java | 10 +-- .../SharedFSRecoverySourceHandler.java | 2 +- .../engine/InternalEngineSettingsTests.java | 3 +- .../index/engine/InternalEngineTests.java | 12 +-- .../index/shard/EngineAccess.java | 31 ++++++++ .../test/ESSingleNodeTestCase.java | 4 - .../test/InternalTestCluster.java | 2 +- 19 files changed, 177 insertions(+), 142 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java create mode 100644 core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java create mode 100644 core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index e7770a52aee..bac0ff91b3e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -95,6 +95,6 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi protected ShardSegments shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) { IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndex()); IndexShard indexShard = 
indexService.shardSafe(shardRouting.id()); - return new ShardSegments(indexShard.routingEntry(), indexShard.engine().segments(request.verbose())); + return new ShardSegments(indexShard.routingEntry(), indexShard.segments(request.verbose())); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java index ea2a2ede6c6..c1408e1bd80 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -97,7 +97,7 @@ public class TransportUpgradeStatusAction extends TransportBroadcastByNodeAction protected ShardUpgradeStatus shardOperation(UpgradeStatusRequest request, ShardRouting shardRouting) { IndexService indexService = indicesService.indexServiceSafe(shardRouting.shardId().getIndex()); IndexShard indexShard = indexService.shardSafe(shardRouting.shardId().id()); - List segments = indexShard.engine().segments(false); + List segments = indexShard.segments(false); long total_bytes = 0; long to_upgrade_bytes = 0; long to_upgrade_bytes_ancient = 0; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 59bec88d81a..0c70dd456ca 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -20,8 +20,10 @@ package org.elasticsearch.index; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.util.Providers; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.shard.IndexSearcherWrapper; /** * @@ -30,11 +32,19 @@ public class IndexModule extends 
AbstractModule { // pkg private so tests can mock Class engineFactoryImpl = InternalEngineFactory.class; + Class indexSearcherWrapper = null; @Override protected void configure() { - bind(EngineFactory.class).to(engineFactoryImpl); + bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton(); + if (indexSearcherWrapper == null) { + bind(IndexSearcherWrapper.class).toProvider(Providers.of(null)); + } else { + bind(IndexSearcherWrapper.class).to(indexSearcherWrapper).asEagerSingleton(); + } bind(IndexService.class).asEagerSingleton(); bind(IndexServicesProvider.class).asEagerSingleton(); } + + } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index b48b2aedeae..22419ba23cf 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -390,23 +390,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } } - /** - * Closes an optional resource. 
Returns true if the resource was found; - * NOTE: this method swallows all exceptions thrown from the close method of the injector and logs them as debug log - */ - private boolean closeInjectorOptionalResource(ShardId shardId, Injector shardInjector, Class toClose) { - try { - final Closeable instance = shardInjector.getInstance(toClose); - if (instance == null) { - return false; - } - IOUtils.close(instance); - } catch (Throwable t) { - logger.debug("{} failed to close {}", t, shardId, Strings.toUnderscoreCase(toClose.getSimpleName())); - } - return true; - } - private void onShardClose(ShardLock lock, boolean ownsShard) { if (deleted.get()) { // we remove that shards content if this index has been deleted diff --git a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java index ad136c58dd6..0a34fabd7b1 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java +++ b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.aliases.IndexAliasesService; @@ -27,6 +28,7 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.IndexQueryParserService; +import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.IndicesLifecycle; @@ -56,8 +58,12 @@ public final class IndexServicesProvider { private final EngineFactory factory; private final BigArrays bigArrays; + + + private final IndexSearcherWrapper indexSearcherWrapper; + 
@Inject - public IndexServicesProvider(IndicesLifecycle indicesLifecycle, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays) { + public IndexServicesProvider(IndicesLifecycle indicesLifecycle, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, @Nullable IndexSearcherWrapper indexSearcherWrapper) { this.indicesLifecycle = indicesLifecycle; this.threadPool = threadPool; this.mapperService = mapperService; @@ -72,6 +78,7 @@ public final class IndexServicesProvider { this.similarityService = similarityService; this.factory = factory; this.bigArrays = bigArrays; + this.indexSearcherWrapper = indexSearcherWrapper; } public IndicesLifecycle getIndicesLifecycle() { @@ -129,4 +136,6 @@ public final class IndexServicesProvider { public BigArrays getBigArrays() { return bigArrays; } + + public IndexSearcherWrapper getIndexSearcherWrapper() { return indexSearcherWrapper; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index dc35b95d2c1..ce5f5178cfe 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -79,8 +79,6 @@ public abstract class Engine implements Closeable { protected 
final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock(); protected final ReleasableLock readLock = new ReleasableLock(rwl.readLock()); protected final ReleasableLock writeLock = new ReleasableLock(rwl.writeLock()); - private final IndexSearcherWrapper searcherWrapper; - protected volatile Throwable failedEngine = null; protected Engine(EngineConfig engineConfig) { @@ -94,7 +92,6 @@ public abstract class Engine implements Closeable { engineConfig.getIndexSettings(), engineConfig.getShardId()); this.failedEngineListener = engineConfig.getFailedEngineListener(); this.deletionPolicy = engineConfig.getDeletionPolicy(); - this.searcherWrapper = engineConfig.getSearcherWrapper(); } /** Returns 0 in the case where accountable is null, otherwise returns {@code ramBytesUsed()} */ @@ -282,7 +279,7 @@ public abstract class Engine implements Closeable { try { final Searcher retVal = newSearcher(source, searcher, manager); success = true; - return wrap(engineConfig, retVal); + return retVal; } finally { if (!success) { manager.release(searcher); @@ -301,38 +298,6 @@ public abstract class Engine implements Closeable { } } - /** - * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher - * gets wrapped and a new {@link Searcher} instances is returned, otherwise the provided {@link Searcher} is returned. - * - * This is invoked each time a {@link Searcher} is requested to do an operation. 
(for example search) - */ - private Searcher wrap(EngineConfig engineConfig, final Searcher engineSearcher) throws EngineException { - if (searcherWrapper == null) { - return engineSearcher; - } - - DirectoryReader reader = searcherWrapper.wrap((DirectoryReader) engineSearcher.reader()); - IndexSearcher innerIndexSearcher = new IndexSearcher(reader); - innerIndexSearcher.setQueryCache(engineConfig.getQueryCache()); - innerIndexSearcher.setQueryCachingPolicy(engineConfig.getQueryCachingPolicy()); - innerIndexSearcher.setSimilarity(engineConfig.getSimilarity()); - // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point - // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten - // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times - IndexSearcher indexSearcher = searcherWrapper.wrap(engineConfig, innerIndexSearcher); - if (reader == engineSearcher.reader() && indexSearcher == innerIndexSearcher) { - return engineSearcher; - } else { - return new Engine.Searcher(engineSearcher.source(), indexSearcher) { - @Override - public void close() throws ElasticsearchException { - engineSearcher.close(); - } - }; - } - } - /** returns the translog for this engine */ public abstract Translog getTranslog(); diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 7d57bb5b0b3..a79587e4347 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.indexing.ShardIndexingService; +import 
org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.MergeSchedulerConfig; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.TranslogRecoveryPerformer; @@ -380,14 +381,6 @@ public final class EngineConfig { return queryCachingPolicy; } - IndexSearcherWrapper getSearcherWrapper() { - return searcherWrapper.get(); - } - - public void setSearcherWrapper(IndexSearcherWrapper searcherWrapper) { - this.searcherWrapper.set(searcherWrapper); - } - /** * Returns the translog config for this engine */ diff --git a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java b/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java deleted file mode 100644 index 8a407f00eae..00000000000 --- a/core/src/main/java/org/elasticsearch/index/engine/IndexSearcherWrapper.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.engine; - -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.search.IndexSearcher; - -/** - * Extension point to add custom functionality at request time to the {@link DirectoryReader} - * and {@link IndexSearcher} managed by the {@link Engine}. - */ -interface IndexSearcherWrapper { - - /** - * @param reader The provided directory reader to be wrapped to add custom functionality - * @return a new directory reader wrapping the provided directory reader or if no wrapping was performed - * the provided directory reader - */ - DirectoryReader wrap(DirectoryReader reader); - - /** - * @param engineConfig The engine config which can be used to get the query cache and query cache policy from - * when creating a new index searcher - * @param searcher The provided index searcher to be wrapped to add custom functionality - * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed - * the provided index searcher - */ - IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException; - -} diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 7dd26ec55db..22f2b3cbe44 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -257,7 +257,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple shard.refresh("percolator_load_queries"); // NOTE: we acquire the searcher via the engine directly here since this is executed right // before the shard is marked as POST_RECOVERY - try (Engine.Searcher searcher = shard.engine().acquireSearcher("percolator_load_queries")) { + try (Engine.Searcher searcher = shard.acquireSearcher("percolator_load_queries")) { 
Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME)); QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService); IndexSearcher indexSearcher = new IndexSearcher(searcher.reader()); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java new file mode 100644 index 00000000000..9bc51f6f57b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.shard; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.search.IndexSearcher; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineException; + +/** + * Extension point to add custom functionality at request time to the {@link DirectoryReader} + * and {@link IndexSearcher} managed by the {@link Engine}. 
+ */ +public interface IndexSearcherWrapper { + + /** + * @param reader The provided directory reader to be wrapped to add custom functionality + * @return a new directory reader wrapping the provided directory reader or if no wrapping was performed + * the provided directory reader + */ + DirectoryReader wrap(DirectoryReader reader); + + /** + * @param engineConfig The engine config which can be used to get the query cache and query cache policy from + * when creating a new index searcher + * @param searcher The provided index searcher to be wrapped to add custom functionality + * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed + * the provided index searcher + */ + IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException; + + /** + * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher + * gets wrapped and a new {@link Engine.Searcher} instances is returned, otherwise the provided {@link Engine.Searcher} is returned. + * + * This is invoked each time a {@link Engine.Searcher} is requested to do an operation. 
(for example search) + */ + default Engine.Searcher wrap(EngineConfig engineConfig, Engine.Searcher engineSearcher) { + DirectoryReader reader = wrap((DirectoryReader) engineSearcher.reader()); + IndexSearcher innerIndexSearcher = new IndexSearcher(reader); + innerIndexSearcher.setQueryCache(engineConfig.getQueryCache()); + innerIndexSearcher.setQueryCachingPolicy(engineConfig.getQueryCachingPolicy()); + innerIndexSearcher.setSimilarity(engineConfig.getSimilarity()); + // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point + // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten + // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times + IndexSearcher indexSearcher = wrap(engineConfig, innerIndexSearcher); + if (reader == engineSearcher.reader() && indexSearcher == innerIndexSearcher) { + return engineSearcher; + } else { + return new Engine.Searcher(engineSearcher.source(), indexSearcher) { + @Override + public void close() throws ElasticsearchException { + engineSearcher.close(); + } + }; + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 2137a42c658..ec5db1db46a 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -20,10 +20,8 @@ package org.elasticsearch.index.shard; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.index.IndexCommit; -import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; -import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.apache.lucene.index.*; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryCachingPolicy; import 
org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; @@ -188,7 +186,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett private final IndexShardOperationCounter indexShardOperationCounter; - private EnumSet readAllowedStates = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY); + private final EnumSet readAllowedStates = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED, IndexShardState.POST_RECOVERY); + + private final IndexSearcherWrapper searcherWrapper; @Inject public IndexShard(ShardId shardId, @IndexSettings Settings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) { @@ -244,6 +244,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett this.flushThresholdSize = indexSettings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); this.disableFlush = indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); + this.searcherWrapper = provider.getIndexSearcherWrapper(); } public Store store() { @@ -739,7 +740,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public Engine.Searcher acquireSearcher(String source) { readAllowed(); - return engine().acquireSearcher(source); + Engine engine = engine(); + return searcherWrapper == null ? 
engine.acquireSearcher(source) : searcherWrapper.wrap(engineConfig, engine.acquireSearcher(source)); } public void close(String reason, boolean flushEngine) throws IOException { @@ -1167,6 +1169,24 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } } + public Translog.View acquireTranslogView() { + Engine engine = engine(); + assert engine.getTranslog() != null : "translog must not be null"; + return engine.getTranslog().newView(); + } + + public List segments(boolean verbose) { + return engine().segments(verbose); + } + + public void flushAndCloseEngine() throws IOException { + engine().flushAndClose(); + } + + public Translog getTranslog() { + return engine().getTranslog(); + } + class EngineRefresher implements Runnable { @Override public void run() { @@ -1292,7 +1312,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS))); } - public Engine engine() { + Engine engine() { Engine engine = engineUnsafe(); if (engine == null) { throw new EngineClosedException(shardId); @@ -1507,4 +1527,5 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } return false; } + } diff --git a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index 8703ab2b10a..5acef57cd6f 100644 --- a/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/core/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -264,7 +264,7 @@ public class IndexingMemoryController extends AbstractLifecycleComponent[] asyncSendFiles(Store store, StoreFileMetaData[] files, Function outputStreamFactory) { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java 
b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java index a466147e71c..123480e81de 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java @@ -52,7 +52,7 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler { // if we relocate we need to close the engine in order to open a new // IndexWriter on the other end of the relocation engineClosed = true; - shard.engine().flushAndClose(); + shard.flushAndCloseEngine(); } catch (IOException e) { logger.warn("close engine failed", e); shard.failShard("failed to close engine (phase1)", e); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index fa5db4cdeb4..78705f54a91 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.LiveIndexWriterConfig; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.shard.EngineAccess; import org.elasticsearch.test.ESSingleNodeTestCase; import java.util.concurrent.TimeUnit; @@ -33,7 +34,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { public void testSettingsUpdate() { final IndexService service = createIndex("foo"); // INDEX_COMPOUND_ON_FLUSH - InternalEngine engine = ((InternalEngine)engine(service)); + InternalEngine engine = ((InternalEngine) EngineAccess.engine(service.shard(0))); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); 
client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build()).get(); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(false)); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 62964244344..4b7de9bc3eb 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -67,10 +67,7 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper; -import org.elasticsearch.index.shard.MergeSchedulerConfig; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardUtils; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; +import org.elasticsearch.index.shard.*; import org.elasticsearch.index.similarity.SimilarityLookupService; import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.DirectoryUtils; @@ -491,8 +488,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY)); assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY)); assertThat(stats2.getUserData().get(Translog.TRANSLOG_GENERATION_KEY), not(equalTo(stats1.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)))); - assertThat(stats2.getUserData().get(Translog.TRANSLOG_UUID_KEY), equalTo(stats1.getUserData().get(Translog.TRANSLOG_UUID_KEY))) - ; + assertThat(stats2.getUserData().get(Translog.TRANSLOG_UUID_KEY), equalTo(stats1.getUserData().get(Translog.TRANSLOG_UUID_KEY))); } @Test @@ -516,9 +512,9 @@ public class 
InternalEngineTests extends ESTestCase { Path translog = createTempDir("translog-test"); InternalEngine engine = createEngine(store, translog); engine.close(); - engine.config().setSearcherWrapper(wrapper); + engine = new InternalEngine(engine.config(), false); - Engine.Searcher searcher = engine.acquireSearcher("test"); + Engine.Searcher searcher = wrapper.wrap(engine.config(), engine.acquireSearcher("test")); assertThat(counter.get(), equalTo(2)); searcher.close(); IOUtils.close(store, engine); diff --git a/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java b/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java new file mode 100644 index 00000000000..58e4ddb67a5 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.shard; + +import org.elasticsearch.index.engine.Engine; + +/** + * Test utility to access the engine of a shard + */ +public final class EngineAccess { + + public static Engine engine(IndexShard shard) { + return shard.engine(); + } +} diff --git a/core/src/test/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/core/src/test/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 76602a1ab55..50714e1ed31 100644 --- a/core/src/test/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/core/src/test/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -215,10 +215,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return instanceFromNode.indexServiceSafe(index); } - protected static org.elasticsearch.index.engine.Engine engine(IndexService service) { - return service.shard(0).engine(); - } - /** * Create a new search context. */ diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java index 4d107d1151d..ab273000ed6 100644 --- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -1048,7 +1048,7 @@ public final class InternalTestCluster extends TestCluster { for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { try { - CommitStats commitStats = indexShard.engine().commitStats(); + CommitStats commitStats = indexShard.commitStats(); String syncId = commitStats.getUserData().get(Engine.SYNC_COMMIT_ID); if (syncId != null) { long liveDocsOnShard = commitStats.getNumDocs(); From d6b1f4ce6cf91da27d175827959574307d6de80a Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 2 Oct 2015 15:14:06 +0200 Subject: [PATCH 27/35] Make Percolator a first class citizen in IndexShard and prevent premature index searcher access --- .../cluster/stats/ClusterStatsIndices.java | 2 +- 
.../admin/indices/stats/CommonStats.java | 4 +- .../org/elasticsearch/index/IndexService.java | 4 +- .../{stats => }/PercolateStats.java | 2 +- .../percolator/PercolatorQueriesRegistry.java | 132 +++++++++--------- .../stats/ShardPercolateService.java | 93 ------------ .../elasticsearch/index/shard/IndexShard.java | 26 ++-- .../indices/NodeIndicesStats.java | 2 +- .../percolator/PercolateContext.java | 7 +- .../percolator/PercolatorService.java | 10 +- .../rest/action/cat/RestNodesAction.java | 2 +- .../test/InternalTestCluster.java | 6 +- 12 files changed, 97 insertions(+), 193 deletions(-) rename core/src/main/java/org/elasticsearch/index/percolator/{stats => }/PercolateStats.java (99%) delete mode 100644 core/src/main/java/org/elasticsearch/index/percolator/stats/ShardPercolateService.java diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java index be7a3f0d4de..ff754be2a20 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIndices.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; -import org.elasticsearch.index.percolator.stats.PercolateStats; +import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.search.suggest.completion.CompletionStats; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index b2f0deeceff..53c07114f96 100644 --- 
a/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.stats.PercolateStats; +import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; @@ -167,7 +167,7 @@ public class CommonStats implements Streamable, ToXContent { segments = indexShard.segmentStats(); break; case Percolate: - percolate = indexShard.shardPercolateService().stats(); + percolate = indexShard.percolateStats(); break; case Translog: translog = indexShard.translogStats(); diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 22419ba23cf..574e4551ba6 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -324,9 +324,8 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created"); indicesLifecycle.afterIndexShardCreated(indexShard); - - shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); settingsService.addListener(indexShard); + shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap(); success = true; return indexShard; } catch (IOException e) { @@ -347,7 +346,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone public synchronized void removeShard(int shardId, String reason) { final ShardId sId = new ShardId(index, shardId); - 
final Injector shardInjector; final IndexShard indexShard; if (shards.containsKey(shardId) == false) { return; diff --git a/core/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java similarity index 99% rename from core/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java rename to core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java index 49f2375a03a..f927a42761f 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/stats/PercolateStats.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolateStats.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.percolator.stats; +package org.elasticsearch.index.percolator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 22f2b3cbe44..d811f1f6e71 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.percolator; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -27,6 +28,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -41,20 +44,18 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentTypeListener; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.AbstractIndexShardComponent; -import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.percolator.PercolatorService; import java.io.Closeable; import java.io.IOException; import java.util.Map; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** @@ -64,39 +65,35 @@ import java.util.concurrent.atomic.AtomicBoolean; * Once a document type has been created, the real-time percolator will start to listen to write events and update the * this registry with queries in real time. 
*/ -public class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable{ +public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { public final String MAP_UNMAPPED_FIELDS_AS_STRING = "index.percolator.map_unmapped_fields_as_string"; // This is a shard level service, but these below are index level service: private final IndexQueryParserService queryParserService; private final MapperService mapperService; - private final IndicesLifecycle indicesLifecycle; private final IndexFieldDataService indexFieldDataService; private final ShardIndexingService indexingService; - private final ShardPercolateService shardPercolateService; private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); - private final ShardLifecycleListener shardLifecycleListener = new ShardLifecycleListener(); private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener(); private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener(); private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false); private boolean mapUnmappedFieldsAsString; + private final MeanMetric percolateMetric = new MeanMetric(); + private final CounterMetric currentMetric = new CounterMetric(); + private final CounterMetric numberOfQueries = new CounterMetric(); public PercolatorQueriesRegistry(ShardId shardId, @IndexSettings Settings indexSettings, IndexQueryParserService queryParserService, - ShardIndexingService indexingService, IndicesLifecycle indicesLifecycle, MapperService mapperService, - IndexFieldDataService indexFieldDataService, ShardPercolateService shardPercolateService) { + ShardIndexingService indexingService, MapperService mapperService, + IndexFieldDataService indexFieldDataService) { super(shardId, indexSettings); this.queryParserService = queryParserService; 
this.mapperService = mapperService; - this.indicesLifecycle = indicesLifecycle; this.indexingService = indexingService; this.indexFieldDataService = indexFieldDataService; - this.shardPercolateService = shardPercolateService; this.mapUnmappedFieldsAsString = indexSettings.getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false); - - indicesLifecycle.addListener(shardLifecycleListener); mapperService.addTypeListener(percolateTypeListener); } @@ -107,7 +104,6 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple @Override public void close() { mapperService.removeTypeListener(percolateTypeListener); - indicesLifecycle.removeListener(shardLifecycleListener); indexingService.removeListener(realTimePercolatorOperationListener); clear(); } @@ -116,30 +112,25 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple percolateQueries.clear(); } - void enableRealTimePercolator() { + public void enableRealTimePercolator() { if (realTimePercolatorEnabled.compareAndSet(false, true)) { indexingService.addListener(realTimePercolatorOperationListener); } } - void disableRealTimePercolator() { - if (realTimePercolatorEnabled.compareAndSet(true, false)) { - indexingService.removeListener(realTimePercolatorOperationListener); - } - } - public void addPercolateQuery(String idAsString, BytesReference source) { Query newquery = parsePercolatorDocument(idAsString, source); BytesRef id = new BytesRef(idAsString); - Query previousQuery = percolateQueries.put(id, newquery); - shardPercolateService.addedQuery(id, previousQuery, newquery); + percolateQueries.put(id, newquery); + numberOfQueries.inc(); + } public void removePercolateQuery(String idAsString) { BytesRef id = new BytesRef(idAsString); Query query = percolateQueries.remove(id); if (query != null) { - shardPercolateService.removedQuery(id, query); + numberOfQueries.dec(); } } @@ -225,55 +216,27 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple 
enableRealTimePercolator(); } } - } - private class ShardLifecycleListener extends IndicesLifecycle.Listener { - - @Override - public void afterIndexShardCreated(IndexShard indexShard) { - if (hasPercolatorType(indexShard)) { - enableRealTimePercolator(); + public void loadQueries(IndexReader reader) { + logger.trace("loading percolator queries..."); + final int loadedQueries; + try { + Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME)); + QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService); + IndexSearcher indexSearcher = new IndexSearcher(reader); + indexSearcher.setQueryCache(null); + indexSearcher.search(query, queryCollector); + Map queries = queryCollector.queries(); + for (Map.Entry entry : queries.entrySet()) { + percolateQueries.put(entry.getKey(), entry.getValue()); + numberOfQueries.inc(); } + loadedQueries = queries.size(); + } catch (Exception e) { + throw new PercolatorException(shardId.index(), "failed to load queries from percolator index", e); } - - @Override - public void beforeIndexShardPostRecovery(IndexShard indexShard) { - if (hasPercolatorType(indexShard)) { - // percolator index has started, fetch what we can from it and initialize the indices - // we have - logger.trace("loading percolator queries for [{}]...", shardId); - int loadedQueries = loadQueries(indexShard); - logger.debug("done loading [{}] percolator queries for [{}]", loadedQueries, shardId); - } - } - - private boolean hasPercolatorType(IndexShard indexShard) { - ShardId otherShardId = indexShard.shardId(); - return shardId.equals(otherShardId) && mapperService.hasMapping(PercolatorService.TYPE_NAME); - } - - private int loadQueries(IndexShard shard) { - shard.refresh("percolator_load_queries"); - // NOTE: we acquire the searcher via the engine directly here since this is executed right - // before the shard is marked as POST_RECOVERY - try 
(Engine.Searcher searcher = shard.acquireSearcher("percolator_load_queries")) { - Query query = new TermQuery(new Term(TypeFieldMapper.NAME, PercolatorService.TYPE_NAME)); - QueriesLoaderCollector queryCollector = new QueriesLoaderCollector(PercolatorQueriesRegistry.this, logger, mapperService, indexFieldDataService); - IndexSearcher indexSearcher = new IndexSearcher(searcher.reader()); - indexSearcher.setQueryCache(null); - indexSearcher.search(query, queryCollector); - Map queries = queryCollector.queries(); - for (Map.Entry entry : queries.entrySet()) { - Query previousQuery = percolateQueries.put(entry.getKey(), entry.getValue()); - shardPercolateService.addedQuery(entry.getKey(), previousQuery, entry.getValue()); - } - return queries.size(); - } catch (Exception e) { - throw new PercolatorException(shardId.index(), "failed to load queries from percolator index", e); - } - } - + logger.debug("done loading [{}] percolator queries", loadedQueries); } private class RealTimePercolatorOperationListener extends IndexingOperationListener { @@ -320,4 +283,35 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple } } } + + public void prePercolate() { + currentMetric.inc(); + } + + public void postPercolate(long tookInNanos) { + currentMetric.dec(); + percolateMetric.inc(tookInNanos); + } + + /** + * @return The current metrics + */ + public PercolateStats stats() { + return new PercolateStats(percolateMetric.count(), TimeUnit.NANOSECONDS.toMillis(percolateMetric.sum()), currentMetric.count(), -1, numberOfQueries.count()); + } + + // Enable when a more efficient manner is found for estimating the size of a Lucene query. 
+ /*private static long computeSizeInMemory(HashedBytesRef id, Query query) { + long size = (3 * RamUsageEstimator.NUM_BYTES_INT) + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + id.bytes.bytes.length; + size += RamEstimator.sizeOf(query); + return size; + } + + private static final class RamEstimator { + // we move this into it's own class to exclude it from the forbidden API checks + // it's fine to use here! + static long sizeOf(Query query) { + return RamUsageEstimator.sizeOf(query); + } + }*/ } diff --git a/core/src/main/java/org/elasticsearch/index/percolator/stats/ShardPercolateService.java b/core/src/main/java/org/elasticsearch/index/percolator/stats/ShardPercolateService.java deleted file mode 100644 index 80f6bd9be38..00000000000 --- a/core/src/main/java/org/elasticsearch/index/percolator/stats/ShardPercolateService.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.percolator.stats; - -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.metrics.CounterMetric; -import org.elasticsearch.common.metrics.MeanMetric; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.settings.IndexSettings; -import org.elasticsearch.index.shard.AbstractIndexShardComponent; -import org.elasticsearch.index.shard.ShardId; - -import java.util.concurrent.TimeUnit; - -/** - * Shard level percolator service that maintains percolator metrics: - *

    - *
  • total time spent in percolate api - *
  • the current number of percolate requests - *
  • number of registered percolate queries - *
- */ -public class ShardPercolateService extends AbstractIndexShardComponent { - - @Inject - public ShardPercolateService(ShardId shardId, @IndexSettings Settings indexSettings) { - super(shardId, indexSettings); - } - - private final MeanMetric percolateMetric = new MeanMetric(); - private final CounterMetric currentMetric = new CounterMetric(); - - private final CounterMetric numberOfQueries = new CounterMetric(); - - public void prePercolate() { - currentMetric.inc(); - } - - public void postPercolate(long tookInNanos) { - currentMetric.dec(); - percolateMetric.inc(tookInNanos); - } - - public void addedQuery(BytesRef id, Query previousQuery, Query newQuery) { - numberOfQueries.inc(); - } - - public void removedQuery(BytesRef id, Query query) { - numberOfQueries.dec(); - } - - /** - * @return The current metrics - */ - public PercolateStats stats() { - return new PercolateStats(percolateMetric.count(), TimeUnit.NANOSECONDS.toMillis(percolateMetric.sum()), currentMetric.count(), -1, numberOfQueries.count()); - } - - // Enable when a more efficient manner is found for estimating the size of a Lucene query. - /*private static long computeSizeInMemory(HashedBytesRef id, Query query) { - long size = (3 * RamUsageEstimator.NUM_BYTES_INT) + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + id.bytes.bytes.length; - size += RamEstimator.sizeOf(query); - return size; - } - - private static final class RamEstimator { - // we move this into it's own class to exclude it from the forbidden API checks - // it's fine to use here! 
- static long sizeOf(Query query) { - return RamUsageEstimator.sizeOf(query); - } - }*/ - -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ec5db1db46a..3270463ed9c 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.shard; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.*; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; @@ -73,8 +72,8 @@ import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.indexing.ShardIndexingService; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.merge.MergeStats; +import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; @@ -102,6 +101,7 @@ import org.elasticsearch.indices.InternalIndicesLifecycle; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.threadpool.ThreadPool; @@ -134,7 +134,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett private 
final ShardRequestCache shardQueryCache; private final ShardFieldData shardFieldData; private final PercolatorQueriesRegistry percolatorQueriesRegistry; - private final ShardPercolateService shardPercolateService; private final TermVectorsService termVectorsService; private final IndexFieldDataService indexFieldDataService; private final ShardSuggestMetric shardSuggestMetric = new ShardSuggestMetric(); @@ -215,9 +214,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett this.indicesQueryCache = provider.getIndicesQueryCache(); this.shardQueryCache = new ShardRequestCache(shardId, indexSettings); this.shardFieldData = new ShardFieldData(); - this.shardPercolateService = new ShardPercolateService(shardId, indexSettings); this.indexFieldDataService = provider.getIndexFieldDataService(); - this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryParserService, indexingService, indicesLifecycle, mapperService, indexFieldDataService, shardPercolateService); this.shardBitsetFilterCache = new ShardBitsetFilterCache(shardId, indexSettings); state = IndexShardState.CREATED; this.refreshInterval = indexSettings.getAsTime(INDEX_REFRESH_INTERVAL, EngineConfig.DEFAULT_REFRESH_INTERVAL); @@ -245,6 +242,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett this.disableFlush = indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); this.searcherWrapper = provider.getIndexSearcherWrapper(); + this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, queryParserService, indexingService, mapperService, indexFieldDataService); + if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { + percolatorQueriesRegistry.enableRealTimePercolator(); + } } public Store store() { @@ -614,10 +615,6 @@ public class IndexShard extends AbstractIndexShardComponent implements 
IndexSett return percolatorQueriesRegistry; } - public ShardPercolateService shardPercolateService() { - return shardPercolateService; - } - public TranslogStats translogStats() { return engine().getTranslog().stats(); } @@ -768,8 +765,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } } + public IndexShard postRecovery(String reason) throws IndexShardStartedException, IndexShardRelocatedException, IndexShardClosedException { indicesLifecycle.beforeIndexShardPostRecovery(this); + if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { + refresh("percolator_load_queries"); + try (Engine.Searcher searcher = engine().acquireSearcher("percolator_load_queries")) { + this.percolatorQueriesRegistry.loadQueries(searcher.reader()); + } + } synchronized (mutex) { if (state == IndexShardState.CLOSED) { throw new IndexShardClosedException(shardId); @@ -1187,6 +1191,10 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett return engine().getTranslog(); } + public PercolateStats percolateStats() { + return percolatorQueriesRegistry.stats(); + } + class EngineRefresher implements Runnable { @Override public void run() { diff --git a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java index 747d15a01f9..c8142f3d37a 100644 --- a/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java +++ b/core/src/main/java/org/elasticsearch/indices/NodeIndicesStats.java @@ -38,7 +38,7 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.stats.PercolateStats; +import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import 
org.elasticsearch.index.search.stats.SearchStats; diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java index 190ffc99293..8cb797cdce0 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolateContext.java @@ -50,6 +50,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.shard.IndexShard; @@ -89,6 +90,7 @@ import java.util.concurrent.ConcurrentMap; */ public class PercolateContext extends SearchContext { + private final PercolatorQueriesRegistry percolateQueryRegistry; public boolean limit; private int size; public boolean doSort; @@ -102,7 +104,6 @@ public class PercolateContext extends SearchContext { private final PageCacheRecycler pageCacheRecycler; private final BigArrays bigArrays; private final ScriptService scriptService; - private final ConcurrentMap percolateQueries; private final int numberOfShards; private final Query aliasFilter; private final long originNanoTime = System.nanoTime(); @@ -133,7 +134,7 @@ public class PercolateContext extends SearchContext { this.indexService = indexService; this.fieldDataService = indexService.fieldData(); this.searchShardTarget = searchShardTarget; - this.percolateQueries = indexShard.percolateRegistry().percolateQueries(); + this.percolateQueryRegistry = indexShard.percolateRegistry(); this.types = new String[]{request.documentType()}; this.pageCacheRecycler = pageCacheRecycler; this.bigArrays = bigArrays.withCircuitBreaking(); @@ -179,7 +180,7 @@ public 
class PercolateContext extends SearchContext { } public ConcurrentMap percolateQueries() { - return percolateQueries; + return percolateQueryRegistry.percolateQueries(); } public Query percolateQuery() { diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index ba4ccaeb25e..ef33e10a810 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -71,7 +71,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.percolator.stats.ShardPercolateService; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; @@ -86,7 +86,6 @@ import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.SiblingPipelineAggregator; import org.elasticsearch.search.highlight.HighlightField; import org.elasticsearch.search.highlight.HighlightPhase; @@ -179,9 +178,8 @@ public class PercolatorService extends AbstractComponent { IndexService percolateIndexService = indicesService.indexServiceSafe(request.shardId().getIndex()); IndexShard indexShard = percolateIndexService.shardSafe(request.shardId().id()); indexShard.readAllowed(); // check if we can read the shard... 
- - ShardPercolateService shardPercolateService = indexShard.shardPercolateService(); - shardPercolateService.prePercolate(); + PercolatorQueriesRegistry percolateQueryRegistry = indexShard.percolateRegistry(); + percolateQueryRegistry.prePercolate(); long startTime = System.nanoTime(); // TODO: The filteringAliases should be looked up at the coordinating node and serialized with all shard request, @@ -255,7 +253,7 @@ public class PercolatorService extends AbstractComponent { } finally { SearchContext.removeCurrent(); context.close(); - shardPercolateService.postPercolate(System.nanoTime() - startTime); + percolateQueryRegistry.postPercolate(System.nanoTime() - startTime); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 8ccf2017a81..337dd41b403 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -43,7 +43,7 @@ import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.percolator.stats.PercolateStats; +import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.suggest.stats.SuggestStats; diff --git a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java index ab273000ed6..db11c97bb19 100644 --- a/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java +++ b/core/src/test/java/org/elasticsearch/test/InternalTestCluster.java @@ -1047,8 +1047,8 @@ public final class InternalTestCluster extends TestCluster { IndicesService 
indexServices = getInstance(IndicesService.class, nodeAndClient.name); for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { - try { - CommitStats commitStats = indexShard.commitStats(); + CommitStats commitStats = indexShard.commitStats(); + if (commitStats != null) { // null if the engine is closed or if the shard is recovering String syncId = commitStats.getUserData().get(Engine.SYNC_COMMIT_ID); if (syncId != null) { long liveDocsOnShard = commitStats.getNumDocs(); @@ -1058,8 +1058,6 @@ public final class InternalTestCluster extends TestCluster { docsOnShards.put(syncId, liveDocsOnShard); } } - } catch (EngineClosedException e) { - // nothing to do, shard is closed } } } From e94f242456bc9022c6fcde1e1bb6e8eb441702d5 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 2 Oct 2015 15:51:42 +0200 Subject: [PATCH 28/35] more index level cleanups --- .../org/elasticsearch/index/IndexService.java | 78 ++++++------------- .../index/IndexServicesProvider.java | 3 - .../elasticsearch/index/shard/IndexShard.java | 2 - .../indices/IndicesLifecycle.java | 11 --- .../indices/InternalIndicesLifecycle.java | 22 ------ .../cluster/IndicesClusterStateService.java | 2 +- .../elasticsearch/index/codec/CodecTests.java | 2 +- .../fielddata/AbstractFieldDataTestCase.java | 2 +- .../test/ESSingleNodeTestCase.java | 10 ++- .../test/InternalTestCluster.java | 2 +- .../elasticsearch/test/TestSearchContext.java | 7 +- 11 files changed, 40 insertions(+), 101 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 574e4551ba6..0493559b417 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -26,9 +26,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; import 
org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; @@ -48,11 +46,8 @@ import org.elasticsearch.index.shard.*; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; -import org.elasticsearch.indices.IndicesLifecycle; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InternalIndicesLifecycle; -import org.elasticsearch.indices.cache.query.IndicesQueryCache; -import org.elasticsearch.plugins.PluginsService; import java.io.Closeable; import java.io.IOException; @@ -71,69 +66,48 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; */ public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable { - private final Injector injector; - private final Settings indexSettings; - private final PluginsService pluginsService; - private final InternalIndicesLifecycle indicesLifecycle; private final AnalysisService analysisService; - private final MapperService mapperService; - - private final IndexQueryParserService queryParserService; - - private final SimilarityService similarityService; - - private final IndexAliasesService aliasesService; - - private final IndexCache indexCache; - private final IndexFieldDataService indexFieldData; private final BitsetFilterCache bitsetFilterCache; private final IndexSettingsService settingsService; - private final NodeEnvironment nodeEnv; private final IndicesService indicesServices; + private final IndexServicesProvider indexServicesProvider; + private final IndexStore indexStore; private volatile ImmutableMap shards = ImmutableMap.of(); - - private 
final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false); @Inject - public IndexService(Injector injector, Index index, @IndexSettings Settings indexSettings, NodeEnvironment nodeEnv, - AnalysisService analysisService, MapperService mapperService, IndexQueryParserService queryParserService, - SimilarityService similarityService, IndexAliasesService aliasesService, IndexCache indexCache, + public IndexService(Index index, @IndexSettings Settings indexSettings, NodeEnvironment nodeEnv, + AnalysisService analysisService, IndexSettingsService settingsService, - IndexFieldDataService indexFieldData, BitsetFilterCache bitSetFilterCache, IndicesService indicesServices) { - + IndexFieldDataService indexFieldData, + BitsetFilterCache bitSetFilterCache, + IndicesService indicesServices, + IndexServicesProvider indexServicesProvider, + IndexStore indexStore) { super(index, indexSettings); - this.injector = injector; this.indexSettings = indexSettings; this.analysisService = analysisService; - this.mapperService = mapperService; - this.queryParserService = queryParserService; - this.similarityService = similarityService; - this.aliasesService = aliasesService; - this.indexCache = indexCache; this.indexFieldData = indexFieldData; this.settingsService = settingsService; this.bitsetFilterCache = bitSetFilterCache; - - this.pluginsService = injector.getInstance(PluginsService.class); this.indicesServices = indicesServices; - this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class); - - // inject workarounds for cyclic dep + this.indicesLifecycle = (InternalIndicesLifecycle) indexServicesProvider.getIndicesLifecycle(); + this.nodeEnv = nodeEnv; + this.indexServicesProvider = indexServicesProvider; + this.indexStore = indexStore; indexFieldData.setListener(new FieldDataCacheListener(this)); bitSetFilterCache.setListener(new BitsetCacheListener(this)); - this.nodeEnv = nodeEnv; } 
public int numberOfShards() { @@ -176,16 +150,12 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone return shards.keySet(); } - public Injector injector() { - return injector; - } - public IndexSettingsService settingsService() { return this.settingsService; } public IndexCache cache() { - return indexCache; + return indexServicesProvider.getIndexCache(); } public IndexFieldDataService fieldData() { @@ -201,19 +171,19 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } public MapperService mapperService() { - return mapperService; + return indexServicesProvider.getMapperService(); } public IndexQueryParserService queryParserService() { - return queryParserService; + return indexServicesProvider.getQueryParserService(); } public SimilarityService similarityService() { - return similarityService; + return indexServicesProvider.getSimilarityService(); } public IndexAliasesService aliasesService() { - return aliasesService; + return indexServicesProvider.getIndexAliasesService(); } public synchronized void close(final String reason, boolean delete) { @@ -288,7 +258,6 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone if (path == null) { // TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard // that's being relocated/replicated we know how large it will become once it's done copying: - // Count up how many shards are currently on each data path: Map dataPathToShardCount = new HashMap<>(); for(IndexShard shard : this) { @@ -314,12 +283,11 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone // if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary. 
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false || (primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); - IndexStore indexStore = injector.getInstance(IndexStore.class); - store = new Store(shardId, indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> injector.getInstance(IndicesQueryCache.class).onClose(shardId))); + store = new Store(shardId, indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId))); if (useShadowEngine(primary, indexSettings)) { - indexShard = new ShadowIndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); + indexShard = new ShadowIndexShard(shardId, indexSettings, path, store, indexServicesProvider); } else { - indexShard = new IndexShard(shardId, indexSettings, path, store, injector.getInstance(IndexServicesProvider.class)); + indexShard = new IndexShard(shardId, indexSettings, path, store, indexServicesProvider); } indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created"); @@ -407,6 +375,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone } } + public IndexServicesProvider getIndexServices() { + return indexServicesProvider; + } + private class StoreCloseListener implements Store.OnClose { private final ShardId shardId; private final boolean ownsShard; diff --git a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java index 0a34fabd7b1..fe8428425e2 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java +++ b/core/src/main/java/org/elasticsearch/index/IndexServicesProvider.java @@ -57,9 +57,6 @@ public final class IndexServicesProvider { private final SimilarityService 
similarityService; private final EngineFactory factory; private final BigArrays bigArrays; - - - private final IndexSearcherWrapper indexSearcherWrapper; @Inject diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 3270463ed9c..42f7a78dda4 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -767,7 +767,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public IndexShard postRecovery(String reason) throws IndexShardStartedException, IndexShardRelocatedException, IndexShardClosedException { - indicesLifecycle.beforeIndexShardPostRecovery(this); if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { refresh("percolator_load_queries"); try (Engine.Searcher searcher = engine().acquireSearcher("percolator_load_queries")) { @@ -787,7 +786,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett recoveryState.setStage(RecoveryState.Stage.DONE); changeState(IndexShardState.POST_RECOVERY, reason); } - indicesLifecycle.afterIndexShardPostRecovery(this); return this; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesLifecycle.java b/core/src/main/java/org/elasticsearch/indices/IndicesLifecycle.java index 211b6d4869d..8c761dfe898 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesLifecycle.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesLifecycle.java @@ -97,17 +97,6 @@ public interface IndicesLifecycle { } - /** - * Called right after the shard is moved into POST_RECOVERY mode - */ - public void afterIndexShardPostRecovery(IndexShard indexShard) {} - - /** - * Called right before the shard is moved into POST_RECOVERY mode. - * The shard is ready to be used but not yet marked as POST_RECOVERY. 
- */ - public void beforeIndexShardPostRecovery(IndexShard indexShard) {} - /** * Called after the index shard has been started. */ diff --git a/core/src/main/java/org/elasticsearch/indices/InternalIndicesLifecycle.java b/core/src/main/java/org/elasticsearch/indices/InternalIndicesLifecycle.java index 77050714db2..16c0c362c42 100644 --- a/core/src/main/java/org/elasticsearch/indices/InternalIndicesLifecycle.java +++ b/core/src/main/java/org/elasticsearch/indices/InternalIndicesLifecycle.java @@ -121,28 +121,6 @@ public class InternalIndicesLifecycle extends AbstractComponent implements Indic } } - public void beforeIndexShardPostRecovery(IndexShard indexShard) { - for (Listener listener : listeners) { - try { - listener.beforeIndexShardPostRecovery(indexShard); - } catch (Throwable t) { - logger.warn("{} failed to invoke before shard post recovery callback", t, indexShard.shardId()); - throw t; - } - } - } - - - public void afterIndexShardPostRecovery(IndexShard indexShard) { - for (Listener listener : listeners) { - try { - listener.afterIndexShardPostRecovery(indexShard); - } catch (Throwable t) { - logger.warn("{} failed to invoke after shard post recovery callback", t, indexShard.shardId()); - throw t; - } - } - } public void afterIndexShardStarted(IndexShard indexShard) { for (Listener listener : listeners) { diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index eb2bc242a6c..f2d39e1ac80 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -327,7 +327,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent subPhaseContexts = new HashMap<>(); - public TestSearchContext(ThreadPool threadPool,PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService 
indexService) { + public TestSearchContext(ThreadPool threadPool,PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ScriptService scriptService, IndexService indexService) { super(ParseFieldMatcher.STRICT, null); this.pageCacheRecycler = pageCacheRecycler; this.bigArrays = bigArrays.withCircuitBreaking(); @@ -108,6 +109,7 @@ public class TestSearchContext extends SearchContext { this.fixedBitSetFilterCache = indexService.bitsetFilterCache(); this.threadPool = threadPool; this.indexShard = indexService.shard(0); + this.scriptService = scriptService; } public TestSearchContext() { @@ -119,6 +121,7 @@ public class TestSearchContext extends SearchContext { this.threadPool = null; this.fixedBitSetFilterCache = null; this.indexShard = null; + scriptService = null; } public void setTypes(String... types) { @@ -325,7 +328,7 @@ public class TestSearchContext extends SearchContext { @Override public ScriptService scriptService() { - return indexService.injector().getInstance(ScriptService.class); + return scriptService; } @Override From 674a9851cfedd899f691d99880128f2bfc9e1570 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 2 Oct 2015 15:55:31 +0200 Subject: [PATCH 29/35] rename fooSafe into getFoo and getFooOrNull --- .../TransportClearIndicesCacheAction.java | 2 +- .../flush/TransportShardFlushAction.java | 4 +- .../optimize/TransportOptimizeAction.java | 2 +- .../recovery/TransportRecoveryAction.java | 2 +- .../refresh/TransportShardRefreshAction.java | 4 +- .../TransportIndicesSegmentsAction.java | 2 +- .../stats/TransportIndicesStatsAction.java | 2 +- .../get/TransportUpgradeStatusAction.java | 2 +- .../upgrade/post/TransportUpgradeAction.java | 2 +- .../query/TransportValidateQueryAction.java | 2 +- .../action/bulk/TransportShardBulkAction.java | 4 +- .../action/delete/TransportDeleteAction.java | 5 +- .../action/exists/TransportExistsAction.java | 2 +- .../explain/TransportExplainAction.java | 2 +- .../TransportFieldStatsTransportAction.java | 2 +- 
.../action/get/TransportGetAction.java | 2 +- .../get/TransportShardMultiGetAction.java | 2 +- .../action/index/TransportIndexAction.java | 4 +- .../suggest/TransportSuggestAction.java | 2 +- .../TransportReplicationAction.java | 4 +- .../TransportShardMultiTermsVectorAction.java | 2 +- .../TransportTermVectorsAction.java | 2 +- .../action/update/TransportUpdateAction.java | 4 +- .../org/elasticsearch/index/IndexService.java | 20 ++--- .../elasticsearch/index/shard/IndexShard.java | 70 ++++++++-------- .../BlobStoreIndexShardRepository.java | 4 +- .../elasticsearch/indices/IndicesWarmer.java | 2 +- .../cluster/IndicesClusterStateService.java | 8 +- .../indices/flush/SyncedFlushService.java | 6 +- .../memory/IndexingMemoryController.java | 2 +- .../indices/recovery/RecoverySource.java | 2 +- .../indices/store/IndicesStore.java | 2 +- .../TransportNodesListShardStoreMetaData.java | 2 +- .../percolator/PercolatorService.java | 2 +- .../elasticsearch/search/SearchService.java | 2 +- .../snapshots/SnapshotShardsService.java | 2 +- .../shards/IndicesShardStoreRequestIT.java | 2 +- .../upgrade/UpgradeReallyOldIndexIT.java | 2 +- .../cluster/ClusterInfoServiceIT.java | 2 +- .../index/IndexWithShadowReplicasIT.java | 2 +- .../engine/InternalEngineSettingsTests.java | 2 +- .../mapper/all/SimpleAllMapperTests.java | 2 +- .../index/search/MultiMatchQueryTests.java | 2 +- .../index/shard/EngineAccess.java | 2 +- .../index/shard/IndexShardTests.java | 80 +++++++++---------- .../indices/IndicesServiceTests.java | 4 +- .../flush/SyncedFlushSingleNodeTests.java | 12 +-- .../indices/recovery/IndexRecoveryIT.java | 4 +- .../indices/recovery/RecoveryStatusTests.java | 2 +- .../recovery/RecoveriesCollectionTests.java | 2 +- .../elasticsearch/test/TestSearchContext.java | 2 +- 51 files changed, 150 insertions(+), 157 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java 
b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 00579290ccc..2308d7be6af 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -83,7 +83,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) { IndexService service = indicesService.indexService(shardRouting.getIndex()); if (service != null) { - IndexShard shard = service.shard(shardRouting.id()); + IndexShard shard = service.getShardOrNull(shardRouting.id()); boolean clearedAtLeastOne = false; if (request.queryCache()) { clearedAtLeastOne = true; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index 2bae799af88..f768cfedc94 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -62,7 +62,7 @@ public class TransportShardFlushAction extends TransportReplicationAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).shardSafe(shardRequest.shardId.id()); + IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); indexShard.flush(shardRequest.request.getRequest()); logger.trace("{} flush request executed on primary", indexShard.shardId()); return new Tuple<>(new ActionWriteResponse(), shardRequest.request); @@ -70,7 +70,7 
@@ public class TransportShardFlushAction extends TransportReplicationAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable { - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).shardSafe(shardRequest.shardId.id()); + IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); indexShard.refresh("api"); logger.trace("{} refresh request executed on primary", indexShard.shardId()); return new Tuple<>(new ActionWriteResponse(), shardRequest.request); @@ -71,7 +71,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction segments = indexShard.segments(false); long total_bytes = 0; long to_upgrade_bytes = 0; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 38375af66f9..30aff1f2e6e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -119,7 +119,7 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) { final BulkShardRequest request = shardRequest.request; final IndexService indexService = indicesService.indexServiceSafe(request.index()); - final IndexShard indexShard = indexService.shardSafe(shardRequest.shardId.id()); + final IndexShard indexShard = indexService.getShard(shardRequest.shardId.id()); long[] preVersions = new long[request.items().length]; VersionType[] preVersionTypes = new VersionType[request.items().length]; @@ -447,7 +447,7 @@ public class TransportShardBulkAction extends TransportReplicationAction 
shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) { DeleteRequest request = shardRequest.request; - IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).shardSafe(shardRequest.shardId.id()); + IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId.getIndex()).getShard(shardRequest.shardId.id()); Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY); indexShard.delete(delete); // update the request with teh version so it will go to the replicas @@ -146,7 +145,7 @@ public class TransportDeleteAction extends TransportReplicationAction fieldStats = new HashMap<>(); IndexService indexServices = indicesService.indexServiceSafe(shardId.getIndex()); MapperService mapperService = indexServices.mapperService(); - IndexShard shard = indexServices.shardSafe(shardId.id()); + IndexShard shard = indexServices.getShard(shardId.id()); try (Engine.Searcher searcher = shard.acquireSearcher("fieldstats")) { for (String field : request.getFields()) { MappedFieldType fieldType = mapperService.fullName(field); diff --git a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index cba68bd281f..0bcadd6c90a 100644 --- a/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/core/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -92,7 +92,7 @@ public class TransportGetAction extends TransportSingleShardAction result = executeIndexRequestOnPrimary(null, request, indexShard); final IndexResponse response = result.response; @@ -176,7 +176,7 @@ public class TransportIndexAction extends TransportReplicationAction listener, final int retryCount) { IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex()); - IndexShard indexShard = 
indexService.shardSafe(request.shardId()); + IndexShard indexShard = indexService.getShard(request.shardId()); final UpdateHelper.Result result = updateHelper.prepare(request, indexShard); switch (result.operation()) { case UPSERT: @@ -266,7 +266,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio UpdateResponse update = result.action(); IndexService indexServiceOrNull = indicesService.indexService(request.concreteIndex()); if (indexServiceOrNull != null) { - IndexShard shard = indexService.shard(request.shardId()); + IndexShard shard = indexService.getShardOrNull(request.shardId()); if (shard != null) { shard.indexingService().noopUpdate(request.type()); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 0493559b417..2fc7a242db1 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -67,21 +67,15 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable { private final Settings indexSettings; - private final InternalIndicesLifecycle indicesLifecycle; - private final AnalysisService analysisService; - private final IndexFieldDataService indexFieldData; - private final BitsetFilterCache bitsetFilterCache; - private final IndexSettingsService settingsService; private final NodeEnvironment nodeEnv; private final IndicesService indicesServices; private final IndexServicesProvider indexServicesProvider; private final IndexStore indexStore; - private volatile ImmutableMap shards = ImmutableMap.of(); private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false); @@ -131,15 +125,15 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone * Return the shard with 
the provided id, or null if there is no such shard. */ @Nullable - public IndexShard shard(int shardId) { + public IndexShard getShardOrNull(int shardId) { return shards.get(shardId); } /** * Return the shard with the provided id, or throw an exception if it doesn't exist. */ - public IndexShard shardSafe(int shardId) { - IndexShard indexShard = shard(shardId); + public IndexShard getShard(int shardId) { + IndexShard indexShard = getShardOrNull(shardId); if (indexShard == null) { throw new ShardNotFoundException(new ShardId(index, shardId)); } @@ -420,7 +414,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public void onCache(ShardId shardId, Accountable accountable) { if (shardId != null) { - final IndexShard shard = indexService.shard(shardId.id()); + final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0l; shard.shardBitsetFilterCache().onCached(ramBytesUsed); @@ -431,7 +425,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public void onRemoval(ShardId shardId, Accountable accountable) { if (shardId != null) { - final IndexShard shard = indexService.shard(shardId.id()); + final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { long ramBytesUsed = accountable != null ? 
accountable.ramBytesUsed() : 0l; shard.shardBitsetFilterCache().onRemoval(ramBytesUsed); @@ -450,7 +444,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { if (shardId != null) { - final IndexShard shard = indexService.shard(shardId.id()); + final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { shard.fieldData().onCache(shardId, fieldNames, fieldDataType, ramUsage); } @@ -460,7 +454,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone @Override public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (shardId != null) { - final IndexShard shard = indexService.shard(shardId.id()); + final IndexShard shard = indexService.getShardOrNull(shardId.id()); if (shard != null) { shard.fieldData().onRemoval(shardId, fieldNames, fieldDataType, wasEvicted, sizeInBytes); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 42f7a78dda4..582ded3a033 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -338,7 +338,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if (newRouting.state() == ShardRoutingState.STARTED || newRouting.state() == ShardRoutingState.RELOCATING) { // we want to refresh *before* we move to internal STARTED state try { - engine().refresh("cluster_state_started"); + getEngine().refresh("cluster_state_started"); } catch (Throwable t) { logger.debug("failed to refresh due to move to cluster wide started", t); } @@ -447,7 +447,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if 
(logger.isTraceEnabled()) { logger.trace("index [{}][{}]{}", create.type(), create.id(), create.docs()); } - engine().create(create); + getEngine().create(create); create.endTime(System.nanoTime()); } catch (Throwable ex) { indexingService.postCreate(create, ex); @@ -486,7 +486,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if (logger.isTraceEnabled()) { logger.trace("index [{}][{}]{}", index.type(), index.id(), index.docs()); } - created = engine().index(index); + created = getEngine().index(index); index.endTime(System.nanoTime()); } catch (Throwable ex) { indexingService.postIndex(index, ex); @@ -509,7 +509,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if (logger.isTraceEnabled()) { logger.trace("delete [{}]", delete.uid().text()); } - engine().delete(delete); + getEngine().delete(delete); delete.endTime(System.nanoTime()); } catch (Throwable ex) { indexingService.postDelete(delete, ex); @@ -520,7 +520,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public Engine.GetResult get(Engine.Get get) { readAllowed(); - return engine().get(get); + return getEngine().get(get); } public void refresh(String source) { @@ -529,7 +529,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett logger.trace("refresh with source: {}", source); } long time = System.nanoTime(); - engine().refresh(source); + getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); } @@ -555,7 +555,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett */ @Nullable public CommitStats commitStats() { - Engine engine = engineUnsafe(); + Engine engine = getEngineOrNull(); return engine == null ? 
null : engine.commitStats(); } @@ -582,7 +582,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } public MergeStats mergeStats() { - final Engine engine = engineUnsafe(); + final Engine engine = getEngineOrNull(); if (engine == null) { return new MergeStats(); } @@ -590,7 +590,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } public SegmentsStats segmentStats() { - SegmentsStats segmentsStats = engine().segmentsStats(); + SegmentsStats segmentsStats = getEngine().segmentsStats(); segmentsStats.addBitsetMemoryInBytes(shardBitsetFilterCache.getMemorySizeInBytes()); return segmentsStats; } @@ -616,7 +616,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } public TranslogStats translogStats() { - return engine().getTranslog().stats(); + return getEngine().getTranslog().stats(); } public SuggestStats suggestStats() { @@ -641,7 +641,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public Engine.SyncedFlushResult syncFlush(String syncId, Engine.CommitId expectedCommitId) { verifyStartedOrRecovering(); logger.trace("trying to sync flush. sync id [{}]. 
expected commit id [{}]]", syncId, expectedCommitId); - return engine().syncFlush(syncId, expectedCommitId); + return getEngine().syncFlush(syncId, expectedCommitId); } public Engine.CommitId flush(FlushRequest request) throws ElasticsearchException { @@ -656,7 +656,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett verifyStartedOrRecovering(); long time = System.nanoTime(); - Engine.CommitId commitId = engine().flush(force, waitIfOngoing); + Engine.CommitId commitId = getEngine().flush(force, waitIfOngoing); flushMetric.inc(System.nanoTime() - time); return commitId; @@ -667,7 +667,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if (logger.isTraceEnabled()) { logger.trace("optimize with {}", optimize); } - engine().forceMerge(optimize.flush(), optimize.maxNumSegments(), optimize.onlyExpungeDeletes(), false, false); + getEngine().forceMerge(optimize.flush(), optimize.maxNumSegments(), optimize.onlyExpungeDeletes(), false, false); } /** @@ -680,7 +680,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett } org.apache.lucene.util.Version previousVersion = minimumCompatibleVersion(); // we just want to upgrade the segments, not actually optimize to a single segment - engine().forceMerge(true, // we need to flush at the end to make sure the upgrade is durable + getEngine().forceMerge(true, // we need to flush at the end to make sure the upgrade is durable Integer.MAX_VALUE, // we just want to upgrade the segments, not actually optimize to a single segment false, true, upgrade.upgradeOnlyAncientSegments()); org.apache.lucene.util.Version version = minimumCompatibleVersion(); @@ -693,7 +693,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public org.apache.lucene.util.Version minimumCompatibleVersion() { org.apache.lucene.util.Version luceneVersion = null; - for (Segment segment : engine().segments(false)) { + for (Segment segment 
: getEngine().segments(false)) { if (luceneVersion == null || luceneVersion.onOrAfter(segment.getVersion())) { luceneVersion = segment.getVersion(); } @@ -711,7 +711,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.RELOCATED || state == IndexShardState.CLOSED) { - return engine().snapshotIndex(flushFirst); + return getEngine().snapshotIndex(flushFirst); } else { throw new IllegalIndexShardStateException(shardId, state, "snapshot is not allowed"); } @@ -732,12 +732,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett */ public void failShard(String reason, @Nullable Throwable e) { // fail the engine. This will cause this shard to also be removed from the node's index service. - engine().failEngine(reason, e); + getEngine().failEngine(reason, e); } public Engine.Searcher acquireSearcher(String source) { readAllowed(); - Engine engine = engine(); + Engine engine = getEngine(); return searcherWrapper == null ? 
engine.acquireSearcher(source) : searcherWrapper.wrap(engineConfig, engine.acquireSearcher(source)); } @@ -769,7 +769,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public IndexShard postRecovery(String reason) throws IndexShardStartedException, IndexShardRelocatedException, IndexShardClosedException { if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { refresh("percolator_load_queries"); - try (Engine.Searcher searcher = engine().acquireSearcher("percolator_load_queries")) { + try (Engine.Searcher searcher = getEngine().acquireSearcher("percolator_load_queries")) { this.percolatorQueriesRegistry.loadQueries(searcher.reader()); } } @@ -809,7 +809,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett if (state != IndexShardState.RECOVERING) { throw new IndexShardNotRecoveringException(shardId, state); } - return engineConfig.getTranslogRecoveryPerformer().performBatchRecovery(engine(), operations); + return engineConfig.getTranslogRecoveryPerformer().performBatchRecovery(getEngine(), operations); } /** @@ -848,7 +848,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett * a remote peer. 
*/ public void skipTranslogRecovery() throws IOException { - assert engineUnsafe() == null : "engine was already created"; + assert getEngineOrNull() == null : "engine was already created"; internalPerformTranslogRecovery(true, true); assert recoveryState.getTranslog().recoveredOperations() == 0; } @@ -888,7 +888,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett */ public void finalizeRecovery() { recoveryState().setStage(RecoveryState.Stage.FINALIZE); - engine().refresh("recovery_finalization"); + getEngine().refresh("recovery_finalization"); startScheduledTasksIfNeeded(); engineConfig.setEnableGcDeletes(true); } @@ -978,7 +978,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett config.setIndexingBufferSize(shardIndexingBufferSize); - Engine engine = engineUnsafe(); + Engine engine = getEngineOrNull(); if (engine == null) { logger.debug("updateBufferSize: engine is closed; skipping"); return; @@ -1053,7 +1053,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett */ boolean shouldFlush() { if (disableFlush == false) { - Engine engine = engineUnsafe(); + Engine engine = getEngineOrNull(); if (engine != null) { try { Translog translog = engine.getTranslog(); @@ -1167,26 +1167,26 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett searchService.onRefreshSettings(settings); indexingService.onRefreshSettings(settings); if (change) { - engine().onSettingsChanged(); + getEngine().onSettingsChanged(); } } public Translog.View acquireTranslogView() { - Engine engine = engine(); + Engine engine = getEngine(); assert engine.getTranslog() != null : "translog must not be null"; return engine.getTranslog().newView(); } public List segments(boolean verbose) { - return engine().segments(verbose); + return getEngine().segments(verbose); } public void flushAndCloseEngine() throws IOException { - engine().flushAndClose(); + getEngine().flushAndClose(); 
} public Translog getTranslog() { - return engine().getTranslog(); + return getEngine().getTranslog(); } public PercolateStats percolateStats() { @@ -1197,7 +1197,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett @Override public void run() { // we check before if a refresh is needed, if not, we reschedule, otherwise, we fork, refresh, and then reschedule - if (!engine().refreshNeeded()) { + if (!getEngine().refreshNeeded()) { reschedule(); return; } @@ -1205,7 +1205,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett @Override public void run() { try { - if (engine().refreshNeeded()) { + if (getEngine().refreshNeeded()) { refresh("schedule"); } } catch (EngineClosedException e) { @@ -1318,8 +1318,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett recoveryState.getVerifyIndex().checkIndexTime(Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - timeNS))); } - Engine engine() { - Engine engine = engineUnsafe(); + Engine getEngine() { + Engine engine = getEngineOrNull(); if (engine == null) { throw new EngineClosedException(shardId); } @@ -1328,7 +1328,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett /** NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is * closed. 
*/ - protected Engine engineUnsafe() { + protected Engine getEngineOrNull() { return this.currentEngineReference.get(); } @@ -1462,7 +1462,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett */ public void sync(Translog.Location location) { try { - final Engine engine = engine(); + final Engine engine = getEngine(); engine.getTranslog().ensureSynced(location); } catch (EngineClosedException ex) { // that's fine since we already synced everything on engine close - this also is conform with the methods documentation diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index 0dbbd12834e..091985e344e 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -490,7 +490,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements public SnapshotContext(SnapshotId snapshotId, ShardId shardId, IndexShardSnapshotStatus snapshotStatus) { super(snapshotId, Version.CURRENT, shardId); IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); - store = indexService.shard(shardId.id()).store(); + store = indexService.getShardOrNull(shardId.id()).store(); this.snapshotStatus = snapshotStatus; } @@ -774,7 +774,7 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements */ public RestoreContext(SnapshotId snapshotId, Version version, ShardId shardId, ShardId snapshotShardId, RecoveryState recoveryState) { super(snapshotId, version, shardId, snapshotShardId); - store = indicesService.indexServiceSafe(shardId.getIndex()).shard(shardId.id()).store(); + store = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id()).store(); this.recoveryState = 
recoveryState; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java index 9ee45b21def..2a82774a612 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java @@ -87,7 +87,7 @@ public final class IndicesWarmer extends AbstractComponent { if (indexService == null) { return; } - final IndexShard indexShard = indexService.shard(context.shardId().id()); + final IndexShard indexShard = indexService.getShardOrNull(context.shardId().id()); if (indexShard == null) { return; } diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index f2d39e1ac80..6bce5bc4be1 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -505,7 +505,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) { IndexService indexService = indicesService.indexServiceSafe(request.index()); - IndexShard indexShard = indexService.shardSafe(request.shardId()); + IndexShard indexShard = indexService.getShard(request.shardId()); SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(), request.index(), request.shardId()); diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 3850888d848..c75189544c8 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -289,7 +289,7 @@ public class 
SnapshotShardsService extends AbstractLifecycleComponent shardEntry : entry.getValue().entrySet()) { final ShardId shardId = shardEntry.getKey(); try { - final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).shard(shardId.id()); + final IndexShard indexShard = indicesService.indexServiceSafe(shardId.getIndex()).getShardOrNull(shardId.id()); executor.execute(new AbstractRunnable() { @Override public void doRun() { diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index f040ca229a5..de9eadaf057 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -158,7 +158,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { IndicesService indexServices = internalCluster().getInstance(IndicesService.class, node); IndexService indexShards = indexServices.indexServiceSafe(index); for (Integer shardId : indexShards.shardIds()) { - IndexShard shard = indexShards.shardSafe(shardId); + IndexShard shard = indexShards.getShard(shardId); if (randomBoolean()) { shard.failShard("test", new CorruptIndexException("test corrupted", "")); Set nodes = corruptedShardIDMap.get(shardId); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java index 4ada599f7d3..d365f5b4eeb 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java @@ -65,7 +65,7 @@ public class UpgradeReallyOldIndexIT extends StaticIndexBackwardCompatibilityIT for (IndicesService 
services : internalCluster().getInstances(IndicesService.class)) { IndexService indexService = services.indexService(index); if (indexService != null) { - assertEquals(version, indexService.shard(0).minimumCompatibleVersion()); + assertEquals(version, indexService.getShardOrNull(0).minimumCompatibleVersion()); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 606911fae04..f672b2634bf 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -179,7 +179,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { DiscoveryNode discoveryNode = state.getNodes().get(nodeId); IndicesService indicesService = internalTestCluster.getInstance(IndicesService.class, discoveryNode.getName()); IndexService indexService = indicesService.indexService(shard.index()); - IndexShard indexShard = indexService.shard(shard.id()); + IndexShard indexShard = indexService.getShardOrNull(shard.id()); assertEquals(indexShard.shardPath().getRootDataPath().toString(), dataPath); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 0cbcaf9c2d3..a54be1766f8 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -150,7 +150,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { for (IndicesService service : internalCluster().getDataNodeInstances(IndicesService.class)) { if (service.hasIndex("foo-copy")) { - IndexShard shard = service.indexServiceSafe("foo-copy").shard(0); + IndexShard shard = service.indexServiceSafe("foo-copy").getShardOrNull(0); if (shard.routingEntry().primary()) { assertFalse(shard instanceof ShadowIndexShard); 
} else { diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index 78705f54a91..1ed022dbefa 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -34,7 +34,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { public void testSettingsUpdate() { final IndexService service = createIndex("foo"); // INDEX_COMPOUND_ON_FLUSH - InternalEngine engine = ((InternalEngine) EngineAccess.engine(service.shard(0))); + InternalEngine engine = ((InternalEngine) EngineAccess.engine(service.getShardOrNull(0))); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(true)); client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, false).build()).get(); assertThat(engine.getCurrentIndexWriterConfig().getUseCompoundFile(), is(false)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 009bb4c7f81..a7314c2d27f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -433,7 +433,7 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { client().prepareIndex(index, "type").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh(index).get(); Query query = indexService.mapperService().documentMapper("type").allFieldMapper().fieldType().termQuery("bar", null); - try (Searcher searcher = indexService.shard(0).acquireSearcher("tests")) { + try (Searcher searcher = indexService.getShardOrNull(0).acquireSearcher("tests")) { query = 
searcher.searcher().rewrite(query); final Class expected = boost ? AllTermQuery.class : TermQuery.class; assertThat(query, Matchers.instanceOf(expected)); diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 831dc6c867a..3c3f1b44951 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -71,7 +71,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { QueryShardContext queryShardContext = new QueryShardContext(new Index("test"), queryParser); queryShardContext.setAllowUnmappedFields(true); Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); - try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) { + try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); BooleanQuery.Builder expected = new BooleanQuery.Builder(); diff --git a/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java b/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java index 58e4ddb67a5..9e5eb6c3705 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java +++ b/core/src/test/java/org/elasticsearch/index/shard/EngineAccess.java @@ -26,6 +26,6 @@ import org.elasticsearch.index.engine.Engine; public final class EngineAccess { public static Engine engine(IndexShard shard) { - return shard.engine(); + return shard.getEngine(); } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 25a8bf2b40e..41aff8f4103 100644 --- 
a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -112,7 +112,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); assertEquals(initValue, shard.isFlushOnClose()); final boolean newValue = !initValue; assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_FLUSH_ON_CLOSE, newValue).build())); @@ -183,7 +183,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(shard.shardId)); assertEquals(getShardStateMetadata(shard), shardStateMetaData); ShardRouting routing = new ShardRouting(shard.shardRouting, shard.shardRouting.version() + 1); @@ -232,7 +232,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); try { shard.deleteShardState(); fail("shard is active metadata delete must fail"); @@ -259,7 +259,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class); IndexService test = 
indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); // fail shard shard.failShard("test shard fail", new CorruptIndexException("", "")); // check state file still exists @@ -304,7 +304,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexServiceSafe("test"); - IndexShard indexShard = indexService.shard(0); + IndexShard indexShard = indexService.getShardOrNull(0); client().admin().indices().prepareDelete("test").get(); assertThat(indexShard.getOperationsCount(), equalTo(0)); try { @@ -321,7 +321,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexServiceSafe("test"); - IndexShard indexShard = indexService.shard(0); + IndexShard indexShard = indexService.getShardOrNull(0); assertEquals(0, indexShard.getOperationsCount()); indexShard.incrementOperationCounter(); assertEquals(1, indexShard.getOperationsCount()); @@ -339,7 +339,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "test").setSource("{}").get(); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - indicesService.indexService("test").shard(0).markAsInactive(); + indicesService.indexService("test").getShardOrNull(0).markAsInactive(); assertBusy(new Runnable() { // should be very very quick @Override public void run() { @@ -366,31 +366,31 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "bar", "1").setSource("{}").get(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = 
test.getShardOrNull(0); setDurability(shard, Translog.Durabilty.REQUEST); - assertFalse(shard.engine().getTranslog().syncNeeded()); + assertFalse(shard.getEngine().getTranslog().syncNeeded()); setDurability(shard, Translog.Durabilty.ASYNC); client().prepareIndex("test", "bar", "2").setSource("{}").get(); - assertTrue(shard.engine().getTranslog().syncNeeded()); + assertTrue(shard.getEngine().getTranslog().syncNeeded()); setDurability(shard, Translog.Durabilty.REQUEST); client().prepareDelete("test", "bar", "1").get(); - assertFalse(shard.engine().getTranslog().syncNeeded()); + assertFalse(shard.getEngine().getTranslog().syncNeeded()); setDurability(shard, Translog.Durabilty.ASYNC); client().prepareDelete("test", "bar", "2").get(); - assertTrue(shard.engine().getTranslog().syncNeeded()); + assertTrue(shard.getEngine().getTranslog().syncNeeded()); setDurability(shard, Translog.Durabilty.REQUEST); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "3").setSource("{}")) .add(client().prepareDelete("test", "bar", "1")).get()); - assertFalse(shard.engine().getTranslog().syncNeeded()); + assertFalse(shard.getEngine().getTranslog().syncNeeded()); setDurability(shard, Translog.Durabilty.ASYNC); assertNoFailures(client().prepareBulk() .add(client().prepareIndex("test", "bar", "4").setSource("{}")) .add(client().prepareDelete("test", "bar", "3")).get()); setDurability(shard, Translog.Durabilty.REQUEST); - assertTrue(shard.engine().getTranslog().syncNeeded()); + assertTrue(shard.getEngine().getTranslog().syncNeeded()); } private void setDurability(IndexShard shard, Translog.Durabilty durabilty) { @@ -407,12 +407,12 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); int numDocs = 1; shard.state = IndexShardState.RECOVERING; try 
{ shard.recoveryState().getTranslog().totalOperations(1); - shard.engine().config().getTranslogRecoveryPerformer().performRecoveryOperation(shard.engine(), new Translog.DeleteByQuery(new Engine.DeleteByQuery(null, new BytesArray("{\"term\" : { \"user\" : \"kimchy\" }}"), null, null, null, Engine.Operation.Origin.RECOVERY, 0, "person")), false); + shard.getEngine().config().getTranslogRecoveryPerformer().performRecoveryOperation(shard.getEngine(), new Translog.DeleteByQuery(new Engine.DeleteByQuery(null, new BytesArray("{\"term\" : { \"user\" : \"kimchy\" }}"), null, null, null, Engine.Operation.Origin.RECOVERY, 0, "person")), false); assertTrue(version.onOrBefore(Version.V_1_0_0_Beta2)); numDocs = 0; } catch (ParsingException ex) { @@ -420,9 +420,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { } finally { shard.state = IndexShardState.STARTED; } - shard.engine().refresh("foo"); + shard.getEngine().refresh("foo"); - try (Engine.Searcher searcher = shard.engine().acquireSearcher("foo")) { + try (Engine.Searcher searcher = shard.getEngine().acquireSearcher("foo")) { assertEquals(numDocs, searcher.reader().numDocs()); } } @@ -434,11 +434,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "test").setSource("{}").get(); ensureGreen("test"); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - IndexShard test = indicesService.indexService("test").shard(0); + IndexShard test = indicesService.indexService("test").getShardOrNull(0); assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); client().prepareIndex("test", "test").setSource("{}").get(); assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); - test.engine().flush(); + test.getEngine().flush(); assertEquals(Version.CURRENT.luceneVersion, test.minimumCompatibleVersion()); } @@ -460,7 +460,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertHitCount(response, 1l); 
IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardPath shardPath = shard.shardPath(); Path dataPath = shardPath.getDataPath(); client().admin().indices().prepareClose("test").get(); @@ -580,7 +580,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(), new CommonStats(shard, new CommonStatsFlags()), shard.commitStats()); assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath()); assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath()); @@ -619,7 +619,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("testpreindex"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardIndexingService shardIndexingService = shard.indexingService(); final AtomicBoolean preIndexCalled = new AtomicBoolean(false); @@ -642,7 +642,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("testpostindex"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardIndexingService shardIndexingService = shard.indexingService(); final AtomicBoolean postIndexCalled = new AtomicBoolean(false); @@ -665,7 +665,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = 
getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("testpostindexwithexception"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); ShardIndexingService shardIndexingService = shard.indexingService(); shard.close("Unexpected close", true); @@ -700,7 +700,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); @@ -709,25 +709,25 @@ public class IndexShardTests extends ESSingleNodeTestCase { Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc); shard.index(index); assertTrue(shard.shouldFlush()); - assertEquals(2, shard.engine().getTranslog().totalOperations()); + assertEquals(2, shard.getEngine().getTranslog().totalOperations()); client().prepareIndex("test", "test", "2").setSource("{}").setRefresh(randomBoolean()).get(); assertBusy(() -> { // this is async assertFalse(shard.shouldFlush()); }); - assertEquals(0, shard.engine().getTranslog().totalOperations()); - shard.engine().getTranslog().sync(); - long size = shard.engine().getTranslog().sizeInBytes(); - logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), shard.engine().getTranslog().getGeneration()); + assertEquals(0, shard.getEngine().getTranslog().totalOperations()); + shard.getEngine().getTranslog().sync(); + long size = shard.getEngine().getTranslog().sizeInBytes(); + logger.info("--> current 
translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1000) .put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); - logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), shard.engine().getTranslog().getGeneration()); + logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); assertBusy(() -> { // this is async - logger.info("--> translog size on iter : [{}] num_ops [{}] generation [{}]", shard.engine().getTranslog().sizeInBytes(), shard.engine().getTranslog().totalOperations(), shard.engine().getTranslog().getGeneration()); + logger.info("--> translog size on iter : [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); assertFalse(shard.shouldFlush()); }); - assertEquals(0, shard.engine().getTranslog().totalOperations()); + assertEquals(0, shard.getEngine().getTranslog().totalOperations()); } public void testStressMaybeFlush() throws Exception { @@ -735,7 +735,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - final IndexShard shard = test.shard(0); + final IndexShard shard = test.getShardOrNull(0); 
assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexShard.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS, 1).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); @@ -778,7 +778,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); - final IndexShard shard = test.shard(0); + final IndexShard shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); if (randomBoolean()) { @@ -804,7 +804,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT); IndexService test = indicesService.indexService("test"); - final IndexShard shard = test.shard(0); + final IndexShard shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); if (randomBoolean()) { @@ -852,14 +852,14 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexService test_target = indicesService.indexService("test_target"); - final IndexShard test_shard = test.shard(0); + final IndexShard test_shard = test.getShardOrNull(0); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); client().prepareIndex("test_target", "test", "1").setSource("{}").setRefresh(true).get(); assertHitCount(client().prepareSearch("test_target").get(), 1); assertSearchHits(client().prepareSearch("test_target").get(), "1"); 
client().admin().indices().prepareFlush("test").get(); // only flush test - final ShardRouting origRouting = test_target.shard(0).routingEntry(); + final ShardRouting origRouting = test_target.getShardOrNull(0).routingEntry(); ShardRouting routing = new ShardRouting(origRouting); ShardRoutingHelper.reinit(routing); routing = ShardRoutingHelper.newWithRestoreSource(routing, new RestoreSource(new SnapshotId("foo", "bar"), Version.CURRENT, "test")); @@ -912,7 +912,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = indicesService.indexService("test"); - IndexShard shard = indexService.shard(0); + IndexShard shard = indexService.getShardOrNull(0); IndexSettingsService settingsService = indexService.settingsService(); assertTrue(settingsService.isRegistered(shard)); indexService.removeShard(0, "simon says so"); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index b1ed006d695..995deaca10c 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -137,8 +137,8 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { IndexService test = createIndex("test"); assertTrue(test.hasShard(0)); - ShardPath path = test.shard(0).shardPath(); - assertTrue(test.shard(0).routingEntry().started()); + ShardPath path = test.getShardOrNull(0).shardPath(); + assertTrue(test.getShardOrNull(0).routingEntry().started()); ShardPath shardPath = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings()); assertEquals(shardPath, path); try { diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java 
b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 06c2566fc21..1a4bf8fd3f7 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -42,7 +42,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); @@ -86,7 +86,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); @@ -106,7 +106,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); @@ -129,7 +129,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { public void testSyncFailsOnIndexClosedOrMissing() throws InterruptedException { createIndex("test"); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = 
test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener(); @@ -162,7 +162,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); @@ -195,7 +195,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService("test"); - IndexShard shard = test.shard(0); + IndexShard shard = test.getShardOrNull(0); SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 6609ce8f66b..2eedceffcec 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -270,10 +270,10 @@ public class IndexRecoveryIT extends ESIntegTestCase { @Override public void run() { IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeA); - assertThat(indicesService.indexServiceSafe(INDEX_NAME).shardSafe(0).recoveryStats().currentAsSource(), + assertThat(indicesService.indexServiceSafe(INDEX_NAME).getShard(0).recoveryStats().currentAsSource(), equalTo(1)); indicesService = 
internalCluster().getInstance(IndicesService.class, nodeB); - assertThat(indicesService.indexServiceSafe(INDEX_NAME).shardSafe(0).recoveryStats().currentAsTarget(), + assertThat(indicesService.indexServiceSafe(INDEX_NAME).getShard(0).recoveryStats().currentAsTarget(), equalTo(1)); } }); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java index ed73a44c517..edb0f7b6a78 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java @@ -39,7 +39,7 @@ public class RecoveryStatusTests extends ESSingleNodeTestCase { public void testRenameTempFiles() throws IOException { IndexService service = createIndex("foo"); - IndexShard indexShard = service.shard(0); + IndexShard indexShard = service.getShardOrNull(0); DiscoveryNode node = new DiscoveryNode("foo", new LocalTransportAddress("bar"), Version.CURRENT); RecoveryStatus status = new RecoveryStatus(indexShard, node, new RecoveryTarget.RecoveryListener() { @Override diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java index b1d97b8b4ab..5a5f3163a4c 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java @@ -171,7 +171,7 @@ public class RecoveriesCollectionTests extends ESSingleNodeTestCase { long startRecovery(RecoveriesCollection collection, RecoveryTarget.RecoveryListener listener, TimeValue timeValue) { IndicesService indexServices = getInstanceFromNode(IndicesService.class); - IndexShard indexShard = indexServices.indexServiceSafe("test").shard(0); + IndexShard indexShard = indexServices.indexServiceSafe("test").getShardOrNull(0); final DiscoveryNode sourceNode 
= new DiscoveryNode("id", DummyTransportAddress.INSTANCE, Version.CURRENT); return collection.startRecovery(indexShard, sourceNode, listener, timeValue); } diff --git a/core/src/test/java/org/elasticsearch/test/TestSearchContext.java b/core/src/test/java/org/elasticsearch/test/TestSearchContext.java index b5a3e171c80..35b6ab2f835 100644 --- a/core/src/test/java/org/elasticsearch/test/TestSearchContext.java +++ b/core/src/test/java/org/elasticsearch/test/TestSearchContext.java @@ -108,7 +108,7 @@ public class TestSearchContext extends SearchContext { this.indexFieldDataService = indexService.fieldData(); this.fixedBitSetFilterCache = indexService.bitsetFilterCache(); this.threadPool = threadPool; - this.indexShard = indexService.shard(0); + this.indexShard = indexService.getShardOrNull(0); this.scriptService = scriptService; } From 623a519988d780f651fc29d1bce0dd8684e3124b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Sat, 3 Oct 2015 15:21:15 +0200 Subject: [PATCH 30/35] also wrap searcher when it's used for Get calls --- .../java/org/elasticsearch/index/engine/Engine.java | 13 +++++++++---- .../elasticsearch/index/engine/InternalEngine.java | 6 ++++-- .../elasticsearch/index/engine/ShadowEngine.java | 5 +++-- .../org/elasticsearch/index/shard/IndexShard.java | 2 +- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index ce5f5178cfe..1330ef05a7f 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Base64; 
import org.elasticsearch.common.Nullable; @@ -60,6 +59,8 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Function; +import java.util.function.Supplier; /** * @@ -227,8 +228,8 @@ public abstract class Engine implements Closeable { PENDING_OPERATIONS } - final protected GetResult getFromSearcher(Get get) throws EngineException { - final Searcher searcher = acquireSearcher("get"); + final protected GetResult getFromSearcher(Get get, Function searcherFactory) throws EngineException { + final Searcher searcher = searcherFactory.apply("get"); final Versions.DocIdAndVersion docIdAndVersion; try { docIdAndVersion = Versions.loadDocIdAndVersion(searcher.reader(), get.uid()); @@ -256,7 +257,11 @@ public abstract class Engine implements Closeable { } } - public abstract GetResult get(Get get) throws EngineException; + public final GetResult get(Get get) throws EngineException { + return get(get, this::acquireSearcher); + } + + public abstract GetResult get(Get get, Function searcherFactory) throws EngineException; /** * Returns a new searcher instance. 
The consumer of this diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 5b76040da5e..227212dd86e 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -66,6 +66,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Function; +import java.util.function.Supplier; /** * @@ -303,7 +305,7 @@ public class InternalEngine extends Engine { } @Override - public GetResult get(Get get) throws EngineException { + public GetResult get(Get get, Function searcherFactory) throws EngineException { try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); if (get.realtime()) { @@ -324,7 +326,7 @@ public class InternalEngine extends Engine { } // no version, get the version from the index, we know that we refresh on flush - return getFromSearcher(get); + return getFromSearcher(get, searcherFactory); } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java b/core/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java index f589b289c17..7588ffae355 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java @@ -35,6 +35,7 @@ import org.elasticsearch.index.translog.Translog; import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.function.Function; /** * ShadowEngine is a specialized engine that only allows read-only operations @@ -168,9 +169,9 @@ public class ShadowEngine extends Engine { } @Override - public GetResult get(Get get) throws EngineException { + public GetResult get(Get get, Function searcherFacotry) throws EngineException { 
// There is no translog, so we can get it directly from the searcher - return getFromSearcher(get); + return getFromSearcher(get, searcherFacotry); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 582ded3a033..d0fbd1873f1 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -520,7 +520,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public Engine.GetResult get(Engine.Get get) { readAllowed(); - return getEngine().get(get); + return getEngine().get(get, this::acquireSearcher); } public void refresh(String source) { From 4676eb19a4c622bd7b9a74c27cc9888744377150 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 5 Oct 2015 10:27:34 +0200 Subject: [PATCH 31/35] add tests for IndexSearcherWrapper --- .../index/shard/IndexSearcherWrapper.java | 8 +- .../elasticsearch/index/shard/IndexShard.java | 7 +- .../common/inject/ModuleTestCase.java | 20 ++++ .../elasticsearch/index/IndexModuleTests.java | 66 +++++++++++++ .../index/shard/IndexShardTests.java | 93 ++++++++++++++++++- 5 files changed, 187 insertions(+), 7 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/index/IndexModuleTests.java diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index 9bc51f6f57b..c75f3c7995f 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -26,6 +26,8 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineException; +import java.io.IOException; + /** * Extension point to add custom functionality at 
request time to the {@link DirectoryReader} * and {@link IndexSearcher} managed by the {@link Engine}. @@ -37,7 +39,7 @@ public interface IndexSearcherWrapper { * @return a new directory reader wrapping the provided directory reader or if no wrapping was performed * the provided directory reader */ - DirectoryReader wrap(DirectoryReader reader); + DirectoryReader wrap(DirectoryReader reader) throws IOException; /** * @param engineConfig The engine config which can be used to get the query cache and query cache policy from @@ -46,7 +48,7 @@ public interface IndexSearcherWrapper { * @return a new index searcher wrapping the provided index searcher or if no wrapping was performed * the provided index searcher */ - IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException; + IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws IOException; /** * If there are configured {@link IndexSearcherWrapper} instances, the {@link IndexSearcher} of the provided engine searcher @@ -54,7 +56,7 @@ public interface IndexSearcherWrapper { * * This is invoked each time a {@link Engine.Searcher} is requested to do an operation. 
(for example search) */ - default Engine.Searcher wrap(EngineConfig engineConfig, Engine.Searcher engineSearcher) { + default Engine.Searcher wrap(EngineConfig engineConfig, Engine.Searcher engineSearcher) throws IOException { DirectoryReader reader = wrap((DirectoryReader) engineSearcher.reader()); IndexSearcher innerIndexSearcher = new IndexSearcher(reader); innerIndexSearcher.setQueryCache(engineConfig.getQueryCache()); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index d0fbd1873f1..ea2d555ae0d 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; +import org.elasticsearch.bootstrap.Elasticsearch; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; @@ -738,7 +739,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndexSett public Engine.Searcher acquireSearcher(String source) { readAllowed(); Engine engine = getEngine(); - return searcherWrapper == null ? engine.acquireSearcher(source) : searcherWrapper.wrap(engineConfig, engine.acquireSearcher(source)); + try { + return searcherWrapper == null ? 
engine.acquireSearcher(source) : searcherWrapper.wrap(engineConfig, engine.acquireSearcher(source)); + } catch (IOException ex) { + throw new ElasticsearchException("failed to wrap searcher", ex); + } } public void close(String reason, boolean flushEngine) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java index eeac5463dbb..255def77eb2 100644 --- a/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/inject/ModuleTestCase.java @@ -60,6 +60,22 @@ public abstract class ModuleTestCase extends ESTestCase { fail("Did not find any binding to " + to.getName() + ". Found these bindings:\n" + s); } +// /** Configures the module and asserts "instance" is bound to "to". */ +// public void assertInstanceBinding(Module module, Class to, Object instance) { +// List elements = Elements.getElements(module); +// for (Element element : elements) { +// if (element instanceof ProviderInstanceBinding) { +// assertEquals(instance, ((ProviderInstanceBinding) element).getProviderInstance().get()); +// return; +// } +// } +// StringBuilder s = new StringBuilder(); +// for (Element element : elements) { +// s.append(element + "\n"); +// } +// fail("Did not find any binding to " + to.getName() + ". 
Found these bindings:\n" + s); +// } + /** * Attempts to configure the module, and asserts an {@link IllegalArgumentException} is * caught, containing the given messages @@ -164,6 +180,10 @@ public abstract class ModuleTestCase extends ESTestCase { return; } } + } else if (element instanceof ProviderInstanceBinding) { + ProviderInstanceBinding binding = (ProviderInstanceBinding) element; + assertTrue(tester.test(to.cast(binding.getProviderInstance().get()))); + return; } } StringBuilder s = new StringBuilder(); diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java new file mode 100644 index 00000000000..13957b79908 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.search.IndexSearcher; +import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineException; +import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.shard.IndexSearcherWrapper; +import org.elasticsearch.test.engine.MockEngineFactory; + +public class IndexModuleTests extends ModuleTestCase { + + public void testWrapperIsBound() { + IndexModule module = new IndexModule(); + assertInstanceBinding(module, IndexSearcherWrapper.class,(x) -> x == null); + module.indexSearcherWrapper = Wrapper.class; + assertBinding(module, IndexSearcherWrapper.class, Wrapper.class); + } + + public void testEngineFactoryBound() { + IndexModule module = new IndexModule(); + assertBinding(module, EngineFactory.class, InternalEngineFactory.class); + module.engineFactoryImpl = MockEngineFactory.class; + assertBinding(module, EngineFactory.class, MockEngineFactory.class); + } + + public void testOtherServiceBound() { + IndexModule module = new IndexModule(); + assertBinding(module, IndexService.class, IndexService.class); + assertBinding(module, IndexServicesProvider.class, IndexServicesProvider.class); + } + + public static final class Wrapper implements IndexSearcherWrapper { + + @Override + public DirectoryReader wrap(DirectoryReader reader) { + return null; + } + + @Override + public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException { + return null; + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 41aff8f4103..eb46f2d4e2a 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -20,9 +20,10 @@ package org.elasticsearch.index.shard; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.IndexCommit; -import org.apache.lucene.index.Term; +import org.apache.lucene.index.*; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.Constants; @@ -58,7 +59,10 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexServicesProvider; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.indexing.IndexingOperationListener; import org.elasticsearch.index.indexing.ShardIndexingService; @@ -918,4 +922,87 @@ public class IndexShardTests extends ESSingleNodeTestCase { indexService.removeShard(0, "simon says so"); assertFalse(settingsService.isRegistered(shard)); } + + public void testSearcherWrapperIsUsed() throws IOException { + createIndex("test"); + ensureGreen(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService indexService = indicesService.indexService("test"); + IndexShard shard = indexService.getShardOrNull(0); + client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(randomBoolean()).get(); + client().prepareIndex("test", "test", "1").setSource("{\"foobar\" : \"bar\"}").setRefresh(true).get(); + + try (Engine.Searcher searcher = 
shard.acquireSearcher("test")) { + TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10); + assertEquals(search.totalHits, 1); + search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10); + assertEquals(search.totalHits, 1); + } + + ShardRouting routing = new ShardRouting(shard.routingEntry()); + shard.close("simon says", true); + IndexServicesProvider indexServices = indexService.getIndexServices(); + IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { + @Override + public DirectoryReader wrap(DirectoryReader reader) throws IOException { + return new FieldMaskingReader("foo", reader); + } + + @Override + public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException { + return searcher; + } + }; + + IndexServicesProvider newProvider = new IndexServicesProvider(indexServices.getIndicesLifecycle(), indexServices.getThreadPool(), indexServices.getMapperService(), indexServices.getQueryParserService(), indexServices.getIndexCache(), indexServices.getIndexAliasesService(), indexServices.getIndicesQueryCache(), indexServices.getCodecService(), indexServices.getTermVectorsService(), indexServices.getIndexFieldDataService(), indexServices.getWarmer(), indexServices.getSimilarityService(), indexServices.getFactory(), indexServices.getBigArrays(), wrapper); + IndexShard newShard = new IndexShard(shard.shardId(), shard.indexSettings, shard.shardPath(), shard.store(), newProvider); + + ShardRoutingHelper.reinit(routing); + newShard.updateRoutingEntry(routing, false); + DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT); + assertTrue(newShard.recoverFromStore(routing, localNode)); + routing = new ShardRouting(routing); + ShardRoutingHelper.moveToStarted(routing); + newShard.updateRoutingEntry(routing, true); + try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { + TopDocs search = searcher.searcher().search(new 
TermQuery(new Term("foo", "bar")), 10); + assertEquals(search.totalHits, 0); + search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10); + assertEquals(search.totalHits, 1); + } + newShard.close("just do it", randomBoolean()); + } + + private static class FieldMaskingReader extends FilterDirectoryReader { + + + private final String field; + + public FieldMaskingReader(String field, DirectoryReader in) throws IOException { + super(in, new SubReaderWrapper() { + private final String filteredField = field; + @Override + public LeafReader wrap(LeafReader reader) { + return new FilterLeafReader(reader) { + @Override + public Fields fields() throws IOException { + return new FilterFields(super.fields()) { + @Override + public Terms terms(String field) throws IOException { + return filteredField.equals(field) ? null : super.terms(field); + } + }; + } + }; + } + }); + this.field = field; + + } + + @Override + protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { + return new FieldMaskingReader(field, in); + } + } } From efdecfa16109202ffba3c91b7c8236993d005766 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 5 Oct 2015 14:23:55 +0200 Subject: [PATCH 32/35] [TEST] Add tests to ensure that Get uses wrapped searcher / reader --- .../index/shard/IndexShardTests.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index eb46f2d4e2a..c1bdd9d2e7e 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -21,9 +21,7 @@ package org.elasticsearch.index.shard; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.*; -import org.apache.lucene.search.IndexSearcher; 
-import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.*; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.Constants; @@ -69,6 +67,7 @@ import org.elasticsearch.index.indexing.ShardIndexingService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.common.ParsingException; import org.elasticsearch.index.settings.IndexSettingsService; @@ -932,6 +931,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().prepareIndex("test", "test", "0").setSource("{\"foo\" : \"bar\"}").setRefresh(randomBoolean()).get(); client().prepareIndex("test", "test", "1").setSource("{\"foobar\" : \"bar\"}").setRefresh(true).get(); + Engine.GetResult getResult = shard.get(new Engine.Get(false, new Term(UidFieldMapper.NAME, Uid.createUid("test", "1")))); + assertTrue(getResult.exists()); + assertNotNull(getResult.searcher()); + getResult.release(); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10); assertEquals(search.totalHits, 1); @@ -970,14 +973,17 @@ public class IndexShardTests extends ESSingleNodeTestCase { search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10); assertEquals(search.totalHits, 1); } + getResult = newShard.get(new Engine.Get(false, new Term(UidFieldMapper.NAME, Uid.createUid("test", "1")))); + assertTrue(getResult.exists()); + assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader + assertTrue(getResult.searcher().reader() instanceof FieldMaskingReader); + getResult.release(); newShard.close("just do it", randomBoolean()); } private 
static class FieldMaskingReader extends FilterDirectoryReader { - private final String field; - public FieldMaskingReader(String field, DirectoryReader in) throws IOException { super(in, new SubReaderWrapper() { private final String filteredField = field; From 3a0d1841d9dc99b752d13118ea303f1497c28fc0 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 5 Oct 2015 14:50:15 +0200 Subject: [PATCH 33/35] Query refactoring: simplify IndexQueryParserService parse methods and prepare the field for #13859 Relates to #13859 --- .../index/query/IndexQueryParserService.java | 42 +++++++++++++------ 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java index d4f7491fb11..bbd9f84e81e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java +++ b/core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java @@ -197,15 +197,6 @@ public class IndexQueryParserService extends AbstractIndexComponent { } } - @Nullable - public Query parseInnerQuery(QueryShardContext context) throws IOException { - Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context); - if (query == null) { - query = Queries.newMatchNoDocsQuery(); - } - return query; - } - public QueryShardContext getShardContext() { return cache.get(); } @@ -258,16 +249,41 @@ public class IndexQueryParserService extends AbstractIndexComponent { context.reset(parser); try { context.parseFieldMatcher(parseFieldMatcher); - Query query = context.parseContext().parseInnerQueryBuilder().toQuery(context); - if (query == null) { - query = Queries.newMatchNoDocsQuery(); - } + Query query = parseInnerQuery(context); return new ParsedQuery(query, context.copyNamedQueries()); } finally { context.reset(null); } } + public Query parseInnerQuery(QueryShardContext context) throws IOException { + return 
toQuery(context.parseContext().parseInnerQueryBuilder(), context); + } + + public ParsedQuery toQuery(QueryBuilder queryBuilder) { + QueryShardContext context = cache.get(); + context.reset(); + context.parseFieldMatcher(parseFieldMatcher); + try { + Query query = toQuery(queryBuilder, context); + return new ParsedQuery(query, context.copyNamedQueries()); + } catch(QueryShardException | ParsingException e ) { + throw e; + } catch(Exception e) { + throw new QueryShardException(context, "failed to create query: {}", e, queryBuilder); + } finally { + context.reset(); + } + } + + private static Query toQuery(QueryBuilder queryBuilder, QueryShardContext context) throws IOException { + Query query = queryBuilder.toQuery(context); + if (query == null) { + query = Queries.newMatchNoDocsQuery(); + } + return query; + } + public ParseFieldMatcher parseFieldMatcher() { return parseFieldMatcher; } From e8653f51569580f23ca28a91106d90b07e989167 Mon Sep 17 00:00:00 2001 From: javanna Date: Mon, 5 Oct 2015 13:44:20 +0200 Subject: [PATCH 34/35] Java api: IdsQueryBuilder to accept only non null ids and types Types are still optional, but if you do provide them, they can't be null. Split the existing constructor that accepted null into two, one that accepts no arguments, and another one that accepts the types argument, which must be not null. Also trimmed down different ways of setting ids, some were misleading as they would always add the ids to the existing ones and not set them, the add prefix makes that clear. Left `addIds` method that accepts a varargs argument. Added check for ids not to be null.
--- .../index/query/IdsQueryBuilder.java | 50 +++++-------- .../index/query/QueryBuilders.java | 10 ++- .../index/query/IdsQueryBuilderTests.java | 16 ++++ .../search/child/ChildQuerySearchIT.java | 2 +- .../compress/SearchSourceCompressTests.java | 2 +- docs/reference/migration/migrate_3_0.asciidoc | 4 + .../messy/tests/SearchQueryTests.java | 74 +++---------------- 7 files changed, 59 insertions(+), 99 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java index b85db4b66b1..1de8db2e801 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; @@ -47,9 +46,19 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { static final IdsQueryBuilder PROTOTYPE = new IdsQueryBuilder(); /** - * Creates a new IdsQueryBuilder by optionally providing the types of the documents to look for + * Creates a new IdsQueryBuilder without providing the types of the documents to look for */ - public IdsQueryBuilder(@Nullable String... types) { + public IdsQueryBuilder() { + this.types = new String[0]; + } + + /** + * Creates a new IdsQueryBuilder by providing the types of the documents to look for + */ + public IdsQueryBuilder(String... types) { + if (types == null) { + throw new IllegalArgumentException("[ids] types cannot be null"); + } this.types = types; } @@ -64,32 +73,13 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { * Adds ids to the query. 
*/ public IdsQueryBuilder addIds(String... ids) { + if (ids == null) { + throw new IllegalArgumentException("[ids] ids cannot be null"); + } Collections.addAll(this.ids, ids); return this; } - /** - * Adds ids to the query. - */ - public IdsQueryBuilder addIds(Collection ids) { - this.ids.addAll(ids); - return this; - } - - /** - * Adds ids to the filter. - */ - public IdsQueryBuilder ids(String... ids) { - return addIds(ids); - } - - /** - * Adds ids to the filter. - */ - public IdsQueryBuilder ids(Collection ids) { - return addIds(ids); - } - /** * Returns the ids for the query. */ @@ -100,13 +90,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - if (types != null) { - if (types.length == 1) { - builder.field("type", types[0]); - } else { - builder.array("types", types); - } - } + builder.array("types", types); builder.startArray("values"); for (String value : ids) { builder.value(value); @@ -128,7 +112,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder { query = Queries.newMatchNoDocsQuery(); } else { Collection typesForQuery; - if (types == null || types.length == 0) { + if (types.length == 0) { typesForQuery = context.queryTypes(); } else if (types.length == 1 && MetaData.ALL.equals(types[0])) { typesForQuery = context.mapperService().types(); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index df823e166f7..67e654cb0d8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import 
org.elasticsearch.common.geo.ShapeRelation; @@ -109,12 +108,19 @@ public abstract class QueryBuilders { return new DisMaxQueryBuilder(); } + /** + * Constructs a query that will match only specific ids within all types. + */ + public static IdsQueryBuilder idsQuery() { + return new IdsQueryBuilder(); + } + /** * Constructs a query that will match only specific ids within types. * * @param types The mapping/doc type */ - public static IdsQueryBuilder idsQuery(@Nullable String... types) { + public static IdsQueryBuilder idsQuery(String... types) { return new IdsQueryBuilder(types); } diff --git a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java index 7ec3e3f8d25..177caf08711 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IdsQueryBuilderTests.java @@ -121,4 +121,20 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase return alternateVersions; } + + public void testIllegalArguments() { + try { + new IdsQueryBuilder((String[])null); + fail("must be not null"); + } catch(IllegalArgumentException e) { + //all good + } + + try { + new IdsQueryBuilder().addIds((String[])null); + fail("must be not null"); + } catch(IllegalArgumentException e) { + //all good + } + } } diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 335b584e12b..09f33f60ed5 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -181,7 +181,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); // TEST FETCHING _parent from child - SearchResponse searchResponse = 
client().prepareSearch("test").setQuery(idsQuery("child").ids("c1")).addFields("_parent").execute() + SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).addFields("_parent").execute() .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1l)); diff --git a/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java b/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java index b6d335318ce..d3b5160db4f 100644 --- a/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java +++ b/core/src/test/java/org/elasticsearch/search/compress/SearchSourceCompressTests.java @@ -84,7 +84,7 @@ public class SearchSourceCompressTests extends ESSingleNodeTestCase { assertThat(getResponse.getSourceAsBytes(), equalTo(buildSource(10000).bytes().toBytes())); for (int i = 1; i < 100; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.idsQuery("type1").ids(Integer.toString(i))).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.idsQuery("type1").addIds(Integer.toString(i))).execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1l)); assertThat(searchResponse.getHits().getAt(0).source(), equalTo(buildSource(i).bytes().toBytes())); } diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index 6607a98f0fa..897098fdbb6 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -262,3 +262,7 @@ of string values: see `FilterFunctionScoreQuery.ScoreMode` and `CombineFunction` `CombineFunction.MULT` has been renamed to `MULTIPLY`. 
+==== IdsQueryBuilder + +For simplicity, only one way of adding the ids to the existing list (empty by default) is left: `addIds(String...)` + diff --git a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchQueryTests.java b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchQueryTests.java index 89f0778f8ce..7b8ea710a00 100644 --- a/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchQueryTests.java +++ b/plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchQueryTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.MapperParsingException; @@ -677,25 +678,25 @@ public class SearchQueryTests extends ESIntegTestCase { client().prepareIndex("test", "type1", "2").setSource("field1", "value2"), client().prepareIndex("test", "type1", "3").setSource("field1", "value3")); - SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").ids("1", "3"))).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").addIds("1", "3"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); // no type - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().ids("1", "3"))).get(); + searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch().setQuery(idsQuery("type1").ids("1", "3")).get(); + searchResponse = 
client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "3")).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); // no type - searchResponse = client().prepareSearch().setQuery(idsQuery().ids("1", "3")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch().setQuery(idsQuery("type1").ids("7", "10")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("7", "10")).get(); assertHitCount(searchResponse, 0l); // repeat..., with terms @@ -1292,52 +1293,6 @@ public class SearchQueryTests extends ESIntegTestCase { assertHitCount(searchResponse, 0l); } - @Test - public void testBasicFilterById() throws Exception { - createIndex("test"); - - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get(); - client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(idsQuery("type1").ids("1")).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1", "type2").ids("1", "2"))).get(); - assertHitCount(searchResponse, 2l); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(idsQuery().ids("1")).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(idsQuery().ids("1", "2")).get(); - assertHitCount(searchResponse, 2l); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - - searchResponse = 
client().prepareSearch().setQuery(constantScoreQuery(idsQuery().ids("1", "2"))).get(); - assertHitCount(searchResponse, 2l); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").ids("1", "2"))).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().ids("1"))).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - // TODO: why do we even support passing null?? - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery((String[])null).ids("1"))).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4"))).get(); - assertHitCount(searchResponse, 2l); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - } - @Test public void testBasicQueryById() throws Exception { createIndex("test"); @@ -1346,32 +1301,27 @@ public class SearchQueryTests extends ESIntegTestCase { client().prepareIndex("test", "type2", "2").setSource("field1", "value2").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2").ids("1", "2")).get(); + SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2").addIds("1", "2")).get(); assertHitCount(searchResponse, 2l); assertThat(searchResponse.getHits().hits().length, equalTo(2)); - searchResponse = client().prepareSearch().setQuery(idsQuery().ids("1")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().hits().length, equalTo(1)); - 
searchResponse = client().prepareSearch().setQuery(idsQuery().ids("1", "2")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); assertHitCount(searchResponse, 2l); assertThat(searchResponse.getHits().hits().length, equalTo(2)); - - searchResponse = client().prepareSearch().setQuery(idsQuery("type1").ids("1", "2")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "2")).get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().hits().length, equalTo(1)); - searchResponse = client().prepareSearch().setQuery(idsQuery().ids("1")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery(Strings.EMPTY_ARRAY).addIds("1")).get(); assertHitCount(searchResponse, 1l); assertThat(searchResponse.getHits().hits().length, equalTo(1)); - searchResponse = client().prepareSearch().setQuery(idsQuery((String[])null).ids("1")).get(); - assertHitCount(searchResponse, 1l); - assertThat(searchResponse.getHits().hits().length, equalTo(1)); - - searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "type3").ids("1", "2", "3", "4")).get(); + searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "type3").addIds("1", "2", "3", "4")).get(); assertHitCount(searchResponse, 2l); assertThat(searchResponse.getHits().hits().length, equalTo(2)); } From fce55a15b81923f7a584930293a9773c2886dddb Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Mon, 5 Oct 2015 17:33:03 +0200 Subject: [PATCH 35/35] Remove unneeded Module abstractions These abstractions don't really do anything nor can they be extended. We can just fold them into IndexModule for now. There are more but they are tricky due to some test dependencies which I need to resolve first. 
--- .../org/elasticsearch/index/IndexModule.java | 6 +++ .../aliases/IndexAliasesServiceModule.java | 33 ---------------- .../index/fielddata/IndexFieldDataModule.java | 38 ------------------- .../index/mapper/MapperServiceModule.java | 33 ---------------- .../elasticsearch/indices/IndicesService.java | 6 --- 5 files changed, 6 insertions(+), 110 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesServiceModule.java delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataModule.java delete mode 100644 core/src/main/java/org/elasticsearch/index/mapper/MapperServiceModule.java diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 0c70dd456ca..192984815d2 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -21,8 +21,11 @@ package org.elasticsearch.index; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.util.Providers; +import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexSearcherWrapper; /** @@ -44,6 +47,9 @@ public class IndexModule extends AbstractModule { } bind(IndexService.class).asEagerSingleton(); bind(IndexServicesProvider.class).asEagerSingleton(); + bind(MapperService.class).asEagerSingleton(); + bind(IndexAliasesService.class).asEagerSingleton(); + bind(IndexFieldDataService.class).asEagerSingleton(); } diff --git a/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesServiceModule.java b/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesServiceModule.java deleted file 
mode 100644 index 1bb9a58d142..00000000000 --- a/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesServiceModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.aliases; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class IndexAliasesServiceModule extends AbstractModule { - - @Override - protected void configure() { - bind(IndexAliasesService.class).asEagerSingleton(); - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataModule.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataModule.java deleted file mode 100644 index e68ff4cf2c3..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataModule.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.fielddata; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.settings.Settings; - -/** - */ -public class IndexFieldDataModule extends AbstractModule { - - private final Settings settings; - - public IndexFieldDataModule(Settings settings) { - this.settings = settings; - } - - @Override - protected void configure() { - bind(IndexFieldDataService.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperServiceModule.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperServiceModule.java deleted file mode 100644 index e742992a902..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperServiceModule.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.common.inject.AbstractModule; - -/** - * - */ -public class MapperServiceModule extends AbstractModule { - - @Override - protected void configure() { - bind(MapperService.class).asEagerSingleton(); - } -} diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index e2448670f83..ae69eee8c8d 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -53,18 +53,15 @@ import org.elasticsearch.index.IndexNameModule; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.LocalNodeIdModule; -import org.elasticsearch.index.aliases.IndexAliasesServiceModule; import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCacheModule; -import org.elasticsearch.index.fielddata.IndexFieldDataModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MapperServiceModule; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.recovery.RecoveryStats; @@ -343,9 +340,6 @@ public class IndicesService extends AbstractLifecycleComponent i modules.add(new AnalysisModule(indexSettings, indicesAnalysisService)); modules.add(new SimilarityModule(indexSettings)); modules.add(new 
IndexCacheModule(indexSettings)); - modules.add(new IndexFieldDataModule(indexSettings)); - modules.add(new MapperServiceModule()); - modules.add(new IndexAliasesServiceModule()); modules.add(new IndexModule()); pluginsService.processModules(modules);