Merge remote-tracking branch 'upstream/master' into index-lifecycle

Commit 09067c8942
@@ -46,7 +46,6 @@
  <!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when the
       files start to pass. -->
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]vectorhighlight[/\\]CustomFieldQuery.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]ClusterHealthRequestBuilder.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]TransportClusterHealthAction.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]hotthreads[/\\]NodesHotThreadsRequestBuilder.java" checks="LineLength" />
@@ -229,13 +228,7 @@
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]spi[/\\]InjectionPoint.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]io[/\\]Channels.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]joda[/\\]Joda.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]Lucene.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]ElasticsearchDirectoryReader.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FilterableTermsEnum.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FreqTermsEnum.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]XMoreLikeThis.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]search[/\\]function[/\\]FunctionScoreQuery.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]store[/\\]ByteArrayIndexInput.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]Cidrs.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]NetworkService.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]recycler[/\\]Recyclers.java" checks="LineLength" />
@@ -387,7 +380,6 @@
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardsService.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotsService.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQueryTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]VersionTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]RejectionActionIT.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]HotThreadsIT.java" checks="LineLength" />
@@ -494,7 +486,6 @@
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]breaker[/\\]MemoryCircuitBreakerTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]geo[/\\]ShapeBuilderTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]hash[/\\]MessageDigestsTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FreqTermsEnumTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]CidrsTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]DistanceUnitTests.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]FuzzinessTests.java" checks="LineLength" />
@@ -665,17 +656,8 @@
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]validate[/\\]SimpleValidateQueryIT.java" checks="LineLength" />
  <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]versioning[/\\]SimpleVersioningIT.java" checks="LineLength" />
  <suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngine.java" checks="LineLength" />
  <suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
  <suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]StoredExpressionTests.java" checks="LineLength" />
  <suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
  <suppress files="modules[/\\]reindex[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]reindex[/\\]TransportUpdateByQueryAction.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-icu[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]IcuCollationTokenFilterFactory.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-icu[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]IcuFoldingTokenFilterFactory.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-icu[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]IndexableBinaryStringTools.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-kuromoji[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]JapaneseStopTokenFilterFactory.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-kuromoji[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]KuromojiAnalysisTests.java" checks="LineLength" />
  <suppress files="plugins[/\\]analysis-phonetic[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]PhoneticTokenFilterFactory.java" checks="LineLength" />
  <suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ec2[/\\]AbstractAwsTestCase.java" checks="LineLength" />
  <suppress files="plugins[/\\]discovery-ec2[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ec2[/\\]AmazonEC2Mock.java" checks="LineLength" />
  <suppress files="plugins[/\\]mapper-murmur3[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]murmur3[/\\]Murmur3FieldMapper.java" checks="LineLength" />
@@ -683,5 +665,4 @@
  <suppress files="plugins[/\\]repository-hdfs[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]hdfs[/\\]HdfsTests.java" checks="LineLength" />
  <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]S3Repository.java" checks="LineLength" />
  <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AmazonS3Wrapper.java" checks="LineLength" />
  <suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
</suppressions>
@@ -20,6 +20,7 @@
package org.elasticsearch.ingest.common;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -72,6 +73,7 @@ public final class ScriptProcessor extends AbstractProcessor {
    public IngestDocument execute(IngestDocument document) {
        IngestScript.Factory factory = scriptService.compile(script, IngestScript.CONTEXT);
        factory.newInstance(script.getParams()).execute(document.getSourceAndMetadata());
        CollectionUtils.ensureNoSelfReferences(document.getSourceAndMetadata(), "ingest script");
        return document;
    }
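The new ensureNoSelfReferences call guards against an ingest script wiring the source map back into itself, which would otherwise recurse without bound when the document is later serialized. A minimal standalone sketch of that failure mode and check (simplified, hypothetical code; the real logic lives in CollectionUtils):

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

public class SelfReferenceDemo {
    // Hypothetical simplified check: track maps on the current traversal path by identity.
    static void ensureNoSelfReferences(Object value, Map<Object, Boolean> seen) {
        if (value instanceof Map) {
            if (seen.put(value, Boolean.TRUE) != null) {
                throw new IllegalArgumentException("Iterable object is self-referencing itself");
            }
            for (Map.Entry<?, ?> e : ((Map<?, ?>) value).entrySet()) {
                ensureNoSelfReferences(e.getKey(), seen);
                ensureNoSelfReferences(e.getValue(), seen);
            }
            seen.remove(value); // done with this path
        }
    }

    public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        source.put("field", source); // what a buggy ingest script might do
        ensureNoSelfReferences(source, new IdentityHashMap<>()); // throws IllegalArgumentException
    }
}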
@@ -455,7 +455,8 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
    // TODO: document and/or error if params contains _score?
    // NOTE: by checking for the variable in params first, it allows masking document fields with a global constant,
    // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field?
    private static void bindFromParams(@Nullable final Map<String, Object> params, final SimpleBindings bindings, final String variable) throws ParseException {
    private static void bindFromParams(@Nullable final Map<String, Object> params,
                                       final SimpleBindings bindings, final String variable) throws ParseException {
        // NOTE: by checking for the variable in vars first, it allows masking document fields with a global constant,
        // but if we were to reverse it, we could provide a way to supply dynamic defaults for documents missing the field?
        Object value = params.get(variable);
@@ -120,7 +120,8 @@ public class MoreExpressionTests extends ESIntegTestCase {
                client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
                client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
                client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
        ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction(new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
        ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction(
                new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
        SearchRequestBuilder req = client().prepareSearch().setIndices("test");
        req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
        req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent
@@ -190,7 +191,10 @@ public class MoreExpressionTests extends ESIntegTestCase {
    }

    public void testMultiValueMethods() throws Exception {
        ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double"));
        ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc",
                "double0", "type=double",
                "double1", "type=double",
                "double2", "type=double"));
        ensureGreen("test");

        Map<String, Object> doc1 = new HashMap<>();
@@ -65,8 +65,8 @@ public class StoredExpressionTests extends ESIntegTestCase {
        }
        try {
            client().prepareSearch()
                    .setSource(
                            new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())))
                    .setSource(new SearchSourceBuilder().scriptField("test1",
                            new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())))
                    .setIndices("test").setTypes("scriptTest").get();
            fail("search script should have been rejected");
        } catch(Exception e) {
@@ -36,12 +36,16 @@ import com.ibm.icu.util.ULocale;

/**
 * An ICU based collation token filter. There are two ways to configure collation:
 * <p>The first is simply specifying the locale (defaults to the default locale). The {@code language}
 * parameter is the lowercase two-letter ISO-639 code. An additional {@code country} and {@code variant}
 * <p>The first is simply specifying the locale (defaults to the default
 * locale). The {@code language} parameter is the lowercase two-letter
 * ISO-639 code. An additional {@code country} and {@code variant}
 * can be provided.
 * <p>The second option is to specify collation rules as defined in the <a href="http://www.icu-project.org/userguide/Collate_Customization.html">
 * Collation customization</a> chapter in icu docs. The {@code rules} parameter can either embed the rules definition
 * in the settings or refer to an external location (preferable located under the {@code config} location, relative to it).
 * <p>The second option is to specify collation rules as defined in the
 * <a href="http://www.icu-project.org/userguide/Collate_Customization.html">
 * Collation customization</a> chapter in icu docs. The {@code rules}
 * parameter can either embed the rules definition
 * in the settings or refer to an external location (preferable located under
 * the {@code config} location, relative to it).
 */
public class IcuCollationTokenFilterFactory extends AbstractTokenFilterFactory {
@@ -32,10 +32,12 @@ import org.elasticsearch.index.IndexSettings;
 * Uses the {@link org.apache.lucene.analysis.icu.ICUFoldingFilter}.
 * Applies foldings from UTR#30 Character Foldings.
 * <p>
 * Can be filtered to handle certain characters in a specified way (see http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html)
 * Can be filtered to handle certain characters in a specified way
 * (see http://icu-project.org/apiref/icu4j/com/ibm/icu/text/UnicodeSet.html)
 * E.g national chars that should be retained (filter : "[^åäöÅÄÖ]").
 *
 * <p>The {@code unicodeSetFilter} attribute can be used to provide the UniCodeSet for filtering.
 * <p>The {@code unicodeSetFilter} attribute can be used to provide the
 * UniCodeSet for filtering.
 *
 * @author kimchy (shay.banon)
 */
@@ -133,7 +133,10 @@ public final class IndexableBinaryStringTools {
        codingCase = CODING_CASES[caseNum];

        if (inputByteNum + 1 < inputLength) { // codingCase.numBytes must be 3
            outputArray[outputCharNum++] = (char) ((((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift) + ((inputArray[inputByteNum + 1] & 0xFF) << codingCase.middleShift)) & (short) 0x7FFF);
            outputArray[outputCharNum++] = (char) (
                    ( ((inputArray[inputByteNum] & 0xFF) << codingCase.initialShift)
                    + ((inputArray[inputByteNum + 1] & 0xFF) << codingCase.middleShift)
                    ) & (short) 0x7FFF);
            // Add trailing char containing the number of full bytes in final char
            outputArray[outputCharNum++] = (char) 1;
        } else if (inputByteNum < inputLength) {
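The rewrapped expression folds pieces of two input bytes into a single output char while masking the result to 15 bits, so the top bit of every char stays clear. A worked sketch of that packing with illustrative shift values (the real shifts come from the CODING_CASES table, so the constants below are assumptions):

public class BinaryPackingDemo {
    public static void main(String[] args) {
        int b0 = 0xAB;          // first input byte, already masked with & 0xFF
        int b1 = 0xCD;          // second input byte
        int initialShift = 7;   // illustrative only; real values come from CODING_CASES
        int middleShift = 1;    // illustrative only
        // shift, add, and mask to 15 bits, mirroring the expression above
        char packed = (char) (((b0 << initialShift) + (b1 << middleShift)) & 0x7FFF);
        System.out.printf("packed=0x%04x (top bit always clear)%n", (int) packed);
    }
}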
@@ -47,7 +47,8 @@ public class JapaneseStopTokenFilterFactory extends AbstractTokenFilterFactory{
        super(indexSettings, name, settings);
        this.ignoreCase = settings.getAsBoolean("ignore_case", false);
        this.removeTrailing = settings.getAsBoolean("remove_trailing", true);
        this.stopWords = Analysis.parseWords(env, settings, "stopwords", JapaneseAnalyzer.getDefaultStopSet(), NAMED_STOP_WORDS, ignoreCase);
        this.stopWords = Analysis.parseWords(env, settings, "stopwords",
                JapaneseAnalyzer.getDefaultStopSet(), NAMED_STOP_WORDS, ignoreCase);
    }

    @Override
@@ -139,7 +139,8 @@ public class KuromojiAnalysisTests extends ESTestCase {

        // パーティー should be stemmed by default
        // (min len) コピー should not be stemmed
        String[] expected_tokens_katakana = new String[]{"明後日", "パーティ", "に", "行く", "予定", "が", "ある", "図書館", "で", "資料", "を", "コピー", "し", "まし", "た"};
        String[] expected_tokens_katakana = new String[] {
                "明後日", "パーティ", "に", "行く", "予定", "が", "ある", "図書館", "で", "資料", "を", "コピー", "し", "まし", "た"};
        assertSimpleTSOutput(tokenFilter.create(tokenizer), expected_tokens_katakana);

        tokenFilter = analysis.tokenFilter.get("kuromoji_ks");
@@ -149,7 +150,8 @@ public class KuromojiAnalysisTests extends ESTestCase {

        // パーティー should not be stemmed since min len == 6
        // コピー should not be stemmed
        expected_tokens_katakana = new String[]{"明後日", "パーティー", "に", "行く", "予定", "が", "ある", "図書館", "で", "資料", "を", "コピー", "し", "まし", "た"};
        expected_tokens_katakana = new String[] {
                "明後日", "パーティー", "に", "行く", "予定", "が", "ある", "図書館", "で", "資料", "を", "コピー", "し", "まし", "た"};
        assertSimpleTSOutput(tokenFilter.create(tokenizer), expected_tokens_katakana);
    }
@@ -82,7 +82,9 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
        } else if ("double_metaphone".equalsIgnoreCase(encodername) || "doubleMetaphone".equalsIgnoreCase(encodername)) {
            this.encoder = null;
            this.maxcodelength = settings.getAsInt("max_code_len", 4);
        } else if ("bm".equalsIgnoreCase(encodername) || "beider_morse".equalsIgnoreCase(encodername) || "beidermorse".equalsIgnoreCase(encodername)) {
        } else if ("bm".equalsIgnoreCase(encodername)
                || "beider_morse".equalsIgnoreCase(encodername)
                || "beidermorse".equalsIgnoreCase(encodername)) {
            this.encoder = null;
            this.languageset = settings.getAsList("languageset");
            String ruleType = settings.get("rule_type", "approx");
@@ -60,18 +60,22 @@ public final class SmbDirectoryWrapper extends FilterDirectory {
        static final int CHUNK_SIZE = 8192;

        SmbFSIndexOutput(String name) throws IOException {
            super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", name, new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.READ, StandardOpenOption.WRITE))) {
                // This implementation ensures, that we never write more than CHUNK_SIZE bytes:
                @Override
                public void write(byte[] b, int offset, int length) throws IOException {
                    while (length > 0) {
                        final int chunk = Math.min(length, CHUNK_SIZE);
                        out.write(b, offset, chunk);
                        length -= chunk;
                        offset += chunk;
                    }
                }
            }, CHUNK_SIZE);
            super("SmbFSIndexOutput(path=\"" + fsDirectory.getDirectory().resolve(name) + "\")", name,
                new FilterOutputStream(Channels.newOutputStream(Files.newByteChannel(fsDirectory.getDirectory().resolve(name),
                    StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING,
                    StandardOpenOption.READ, StandardOpenOption.WRITE))) {
                    // This implementation ensures, that we never write more than CHUNK_SIZE bytes:
                    @Override
                    public void write(byte[] b, int offset, int length) throws IOException {
                        while (length > 0) {
                            final int chunk = Math.min(length, CHUNK_SIZE);
                            out.write(b, offset, chunk);
                            length -= chunk;
                            offset += chunk;
                        }
                    }
                },
                CHUNK_SIZE);
        }
    }
}
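The anonymous FilterOutputStream above caps every write handed to the underlying channel at CHUNK_SIZE bytes. The same pattern in isolation, as a runnable sketch (an assumed simplification of the SMB-specific code, writing to an in-memory sink instead of a file channel):

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;

public class ChunkedWriteDemo {
    static final int CHUNK_SIZE = 8192;

    // Wrap any stream so a single large write is forwarded as CHUNK_SIZE pieces.
    static OutputStream chunked(OutputStream sink) {
        return new FilterOutputStream(sink) {
            @Override
            public void write(byte[] b, int offset, int length) throws IOException {
                while (length > 0) {
                    final int chunk = Math.min(length, CHUNK_SIZE);
                    out.write(b, offset, chunk);
                    length -= chunk;
                    offset += chunk;
                }
            }
        };
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        chunked(sink).write(new byte[20000], 0, 20000); // forwarded as 8192 + 8192 + 3616
        System.out.println(sink.size()); // 20000
    }
}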
@@ -95,7 +95,8 @@ public class CustomFieldQuery extends FieldQuery {
        }
    }

    private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, Term[][] terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException {
    private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, Term[][] terms, int[] pos,
                                         IndexReader reader, Collection<Query> flatQueries) throws IOException {
        if (currentPos == 0) {
            // if we have more than 16 terms
            int numTerms = 0;
@@ -259,12 +259,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent {
                IndexMetaData indexMetaData = metaDataBuilder.get(index);
                if (indexMetaData != null) {
                    if (Version.CURRENT.equals(indexMetaData.getCreationVersion()) == false) {
                        // No reason to pollute the settings, we didn't really upgrade anything
                        metaDataBuilder.put(IndexMetaData.builder(indexMetaData)
                                .settings(Settings.builder().put(indexMetaData.getSettings())
                                        .put(IndexMetaData.SETTING_VERSION_UPGRADED, entry.getValue().v1())
                                )
                        );
                        // no reason to pollute the settings, we didn't really upgrade anything
                        metaDataBuilder.put(
                                IndexMetaData
                                        .builder(indexMetaData)
                                        .settings(
                                                Settings
                                                        .builder()
                                                        .put(indexMetaData.getSettings())
                                                        .put(IndexMetaData.SETTING_VERSION_UPGRADED, entry.getValue().v1()))
                                        .settingsVersion(1 + indexMetaData.getSettingsVersion()));
                    }
                }
            }
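The rewritten builder chain does more than rewrap the long line: it now also bumps the index's settings version (1 + indexMetaData.getSettingsVersion()) when the upgraded marker is written, keeping the invariant that every settings mutation increments a monotonic counter other nodes can compare against. A minimal sketch of that invariant with hypothetical types (not the Elasticsearch API):

public class SettingsVersionDemo {
    static final class IndexMeta {
        final long settingsVersion; // monotonic: bumped on every settings change
        final String settings;

        IndexMeta(long settingsVersion, String settings) {
            this.settingsVersion = settingsVersion;
            this.settings = settings;
        }

        IndexMeta withSettings(String newSettings) {
            return new IndexMeta(settingsVersion + 1, newSettings); // never mutate in place
        }
    }

    public static void main(String[] args) {
        IndexMeta meta = new IndexMeta(3, "upgraded=false");
        IndexMeta upgraded = meta.withSettings("upgraded=true");
        System.out.println(upgraded.settingsVersion); // 4
    }
}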
@@ -592,7 +592,8 @@ public class Lucene {
            out.writeString(sortField.getField());
        }
        if (sortField.getComparatorSource() != null) {
            IndexFieldData.XFieldComparatorSource comparatorSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource();
            IndexFieldData.XFieldComparatorSource comparatorSource =
                    (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource();
            writeSortType(out, comparatorSource.reducedType());
            writeMissingValue(out, comparatorSource.missingValue(sortField.getReverse()));
        } else {
@@ -36,7 +36,8 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
    private final ShardId shardId;
    private final FilterDirectoryReader.SubReaderWrapper wrapper;

    private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) throws IOException {
    private ElasticsearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper,
                                         ShardId shardId) throws IOException {
        super(in, wrapper);
        this.wrapper = wrapper;
        this.shardId = shardId;
@@ -84,16 +85,19 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
    }

    /**
     * Adds the given listener to the provided directory reader. The reader must contain an {@link ElasticsearchDirectoryReader} in it's hierarchy
     * Adds the given listener to the provided directory reader. The reader
     * must contain an {@link ElasticsearchDirectoryReader} in it's hierarchy
     * otherwise we can't safely install the listener.
     *
     * @throws IllegalArgumentException if the reader doesn't contain an {@link ElasticsearchDirectoryReader} in it's hierarchy
     * @throws IllegalArgumentException if the reader doesn't contain an
     *         {@link ElasticsearchDirectoryReader} in it's hierarchy
     */
    @SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener")
    public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ClosedListener listener) {
        ElasticsearchDirectoryReader elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader);
        if (elasticsearchDirectoryReader == null) {
            throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
            throw new IllegalArgumentException(
                    "Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
        }
        IndexReader.CacheHelper cacheHelper = elasticsearchDirectoryReader.getReaderCacheHelper();
        if (cacheHelper == null) {
@@ -104,7 +108,9 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
    }

    /**
     * Tries to unwrap the given reader until the first {@link ElasticsearchDirectoryReader} instance is found or <code>null</code> if no instance is found;
     * Tries to unwrap the given reader until the first
     * {@link ElasticsearchDirectoryReader} instance is found or {@code null}
     * if no instance is found.
     */
    public static ElasticsearchDirectoryReader getElasticsearchDirectoryReader(DirectoryReader reader) {
        if (reader instanceof FilterDirectoryReader) {
@@ -61,7 +61,8 @@ public class FilterableTermsEnum extends TermsEnum {
        }
    }

    static final String UNSUPPORTED_MESSAGE = "This TermsEnum only supports #seekExact(BytesRef) as well as #docFreq() and #totalTermFreq()";
    static final String UNSUPPORTED_MESSAGE =
            "This TermsEnum only supports #seekExact(BytesRef) as well as #docFreq() and #totalTermFreq()";
    protected static final int NOT_FOUND = -1;
    private final Holder[] enums;
    protected int currentDocFreq = 0;
@@ -47,7 +47,8 @@ public class FreqTermsEnum extends FilterableTermsEnum implements Releasable {
    private final boolean needTotalTermFreqs;


    public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq, @Nullable Query filter, BigArrays bigArrays) throws IOException {
    public FreqTermsEnum(IndexReader reader, String field, boolean needDocFreq, boolean needTotalTermFreq,
                         @Nullable Query filter, BigArrays bigArrays) throws IOException {
        super(reader, field, needTotalTermFreq ? PostingsEnum.FREQS : PostingsEnum.NONE, filter);
        this.bigArrays = bigArrays;
        this.needDocFreqs = needDocFreq;
@@ -296,7 +296,8 @@ public class FunctionScoreQuery extends Query {
            List<Explanation> functionsExplanations = new ArrayList<>();
            for (int i = 0; i < functions.length; ++i) {
                if (filterWeights[i] != null) {
                    final Bits docSet = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterWeights[i].scorerSupplier(context));
                    final Bits docSet = Lucene.asSequentialAccessBits(
                            context.reader().maxDoc(), filterWeights[i].scorerSupplier(context));
                    if (docSet.get(doc) == false) {
                        continue;
                    }
@@ -354,7 +355,8 @@ public class FunctionScoreQuery extends Query {
        private final boolean needsScores;

        private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions,
                                     float maxBoost, LeafScoreFunction[] leafFunctions, Bits[] docSets, CombineFunction scoreCombiner, boolean needsScores) throws IOException {
                                     float maxBoost, LeafScoreFunction[] leafFunctions, Bits[] docSets,
                                     CombineFunction scoreCombiner, boolean needsScores) throws IOException {
            super(scorer, w);
            this.scoreMode = scoreMode;
            this.functions = functions;
@@ -75,7 +75,8 @@ public class ByteArrayIndexInput extends IndexInput {
        if (offset >= 0L && length >= 0L && offset + length <= this.length) {
            return new ByteArrayIndexInput(sliceDescription, bytes, this.offset + (int)offset, (int)length);
        } else {
            throw new IllegalArgumentException("slice() " + sliceDescription + " out of bounds: offset=" + offset + ",length=" + length + ",fileLength=" + this.length + ": " + this);
            throw new IllegalArgumentException("slice() " + sliceDescription + " out of bounds: offset=" + offset
                    + ",length=" + length + ",fileLength=" + this.length + ": " + this);
        }
    }

@@ -19,6 +19,15 @@

package org.elasticsearch.common.util;

import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.IntroSorter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;

import java.nio.file.Path;
import java.util.AbstractList;
import java.util.ArrayList;
@@ -35,14 +44,6 @@ import java.util.Objects;
import java.util.RandomAccess;
import java.util.Set;

import com.carrotsearch.hppc.ObjectArrayList;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.IntroSorter;
import org.elasticsearch.common.Strings;

/** Collections-related utility methods. */
public class CollectionUtils {

@@ -246,7 +247,8 @@ public class CollectionUtils {
            return null;
        }
        if (value instanceof Map) {
            return ((Map<?,?>) value).values();
            Map<?,?> map = (Map<?,?>) value;
            return () -> Iterators.concat(map.keySet().iterator(), map.values().iterator());
        } else if ((value instanceof Iterable) && (value instanceof Path == false)) {
            return (Iterable<?>) value;
        } else if (value instanceof Object[]) {
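The CollectionUtils change replaces the values-only view of a map with an iterable that concatenates the key set and the values, so a map reachable through one of its own keys is now caught by the self-reference check as well. A small sketch of that traversal (simplified; the real code goes through Iterators.concat):

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

public class KeysAndValuesDemo {
    // mirrors Iterators.concat(map.keySet().iterator(), map.values().iterator())
    static Iterable<Object> children(Map<Object, Object> map) {
        return () -> Stream.concat(map.keySet().stream(), map.values().stream()).iterator();
    }

    public static void main(String[] args) {
        Map<Object, Object> map = new HashMap<>();
        map.put(map, 1); // self-referencing key; a values-only walk would miss it
        for (Object child : children(map)) {
            System.out.println(child == map); // true for the key, false for the value
        }
    }
}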
@@ -41,7 +41,6 @@ import org.apache.lucene.search.similarities.BM25Similarity;
import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
@@ -125,9 +124,9 @@ public class BlendedTermQueryTests extends ESTestCase {
        for (int j = 0; j < iters; j++) {
            String[] fields = new String[1 + random().nextInt(10)];
            for (int i = 0; i < fields.length; i++) {
                fields[i] = TestUtil.randomRealisticUnicodeString(random(), 1, 10);
                fields[i] = randomRealisticUnicodeOfLengthBetween(1, 10);
            }
            String term = TestUtil.randomRealisticUnicodeString(random(), 1, 10);
            String term = randomRealisticUnicodeOfLengthBetween(1, 10);
            Term[] terms = toTerms(fields, term);
            float tieBreaker = random().nextFloat();
            BlendedTermQuery query = BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker);
@@ -159,7 +158,7 @@ public class BlendedTermQueryTests extends ESTestCase {
        Set<Term> terms = new HashSet<>();
        int num = scaledRandomIntBetween(1, 10);
        for (int i = 0; i < num; i++) {
            terms.add(new Term(TestUtil.randomRealisticUnicodeString(random(), 1, 10), TestUtil.randomRealisticUnicodeString(random(), 1, 10)));
            terms.add(new Term(randomRealisticUnicodeOfLengthBetween(1, 10), randomRealisticUnicodeOfLengthBetween(1, 10)));
        }

        BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat());
@@ -213,11 +213,7 @@ public class TransportClientNodesServiceTests extends ESTestCase {
            transport.endConnectMode();
            transportService.stop();
            transportClientNodesService.close();
            try {
                terminate(threadPool);
            } catch (InterruptedException e) {
                throw new AssertionError(e);
            }
            terminate(threadPool);
        }
    }

@@ -0,0 +1,50 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.metadata;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESSingleNodeTestCase;

import static org.hamcrest.Matchers.equalTo;

public class UpgradeIndexSettingsIT extends ESSingleNodeTestCase {

    @Override
    protected boolean forbidPrivateIndexSettings() {
        return false;
    }

    public void testSettingsVersion() {
        createIndex(
                "test",
                Settings
                        .builder()
                        .put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT.minimumIndexCompatibilityVersion())
                        .build());
        final long settingsVersion =
                client().admin().cluster().prepareState().get().getState().metaData().index("test").getSettingsVersion();
        client().admin().indices().prepareUpgrade("test").get();
        assertThat(
                client().admin().cluster().prepareState().get().getState().metaData().index("test").getSettingsVersion(),
                equalTo(1 + settingsVersion));
    }

}
@@ -79,7 +79,8 @@ public class FreqTermsEnumTests extends ESTestCase {
        referenceFilter = new HashMap<>();

        Directory dir = newDirectory();
        IndexWriterConfig conf = newIndexWriterConfig(new KeywordAnalyzer()); // use keyword analyzer we rely on the stored field holding the exact term.
        // use keyword analyzer we rely on the stored field holding the exact term.
        IndexWriterConfig conf = newIndexWriterConfig(new KeywordAnalyzer());
        if (frequently()) {
            // we don't want to do any merges, so we won't expunge deletes
            conf.setMergePolicy(NoMergePolicy.INSTANCE);
@@ -189,12 +190,14 @@ public class FreqTermsEnumTests extends ESTestCase {
        assertAgainstReference(false, true, filter, referenceFilter);
    }

    private void assertAgainstReference(boolean docFreq, boolean totalTermFreq, Query filter, Map<String, FreqHolder> reference) throws Exception {
    private void assertAgainstReference(boolean docFreq, boolean totalTermFreq, Query filter,
                                        Map<String, FreqHolder> reference) throws Exception {
        FreqTermsEnum freqTermsEnum = new FreqTermsEnum(reader, "field", docFreq, totalTermFreq, filter, BigArrays.NON_RECYCLING_INSTANCE);
        assertAgainstReference(freqTermsEnum, reference, docFreq, totalTermFreq);
    }

    private void assertAgainstReference(FreqTermsEnum termsEnum, Map<String, FreqHolder> reference, boolean docFreq, boolean totalTermFreq) throws Exception {
    private void assertAgainstReference(FreqTermsEnum termsEnum, Map<String, FreqHolder> reference, boolean docFreq,
                                        boolean totalTermFreq) throws Exception {
        int cycles = randomIntBetween(1, 5);
        for (int i = 0; i < cycles; i++) {
            List<String> terms = new ArrayList<>(Arrays.asList(this.terms));
@@ -209,7 +212,8 @@ public class FreqTermsEnumTests extends ESTestCase {
                    assertThat("cycle " + i + ", term " + term + ", docFreq", termsEnum.docFreq(), equalTo(reference.get(term).docFreq));
                }
                if (totalTermFreq) {
                    assertThat("cycle " + i + ", term " + term + ", totalTermFreq", termsEnum.totalTermFreq(), equalTo(reference.get(term).totalTermFreq));
                    assertThat("cycle " + i + ", term " + term + ", totalTermFreq", termsEnum.totalTermFreq(),
                            equalTo(reference.get(term).totalTermFreq));
                }
            }
        }
@@ -185,11 +185,22 @@ public class CollectionUtilsTests extends ESTestCase {
        CollectionUtils.ensureNoSelfReferences(emptyMap(), "test with empty map");
        CollectionUtils.ensureNoSelfReferences(null, "test with null");

        Map<String, Object> map = new HashMap<>();
        map.put("field", map);
        {
            Map<String, Object> map = new HashMap<>();
            map.put("field", map);

            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                    () -> CollectionUtils.ensureNoSelfReferences(map, "test with self ref value"));
            assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself (test with self ref value)"));
        }
        {
            Map<Object, Object> map = new HashMap<>();
            map.put(map, 1);

            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                    () -> CollectionUtils.ensureNoSelfReferences(map, "test with self ref key"));
            assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself (test with self ref key)"));
        }

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> CollectionUtils.ensureNoSelfReferences(map, "test with self ref"));
        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself (test with self ref)"));
    }
}
@@ -69,11 +69,7 @@ public class TransportActionProxyTests extends ESTestCase {
    public void tearDown() throws Exception {
        super.tearDown();
        IOUtils.close(serviceA, serviceB, serviceC, () -> {
            try {
                terminate(threadPool);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            terminate(threadPool);
        });
    }

@@ -164,6 +164,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
        private final AtomicInteger replicaId = new AtomicInteger();
        private final AtomicInteger docId = new AtomicInteger();
        boolean closed = false;
        private ReplicationTargets replicationTargets;

        private final PrimaryReplicaSyncer primaryReplicaSyncer = new PrimaryReplicaSyncer(Settings.EMPTY,
            new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()),
            (request, parentTask, primaryAllocationId, primaryTerm, listener) -> {
@@ -277,6 +279,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
            for (final IndexShard replica : replicas) {
                recoverReplica(replica);
            }
            computeReplicationTargets();
        }

        public IndexShard addReplica() throws IOException {
@@ -320,8 +323,9 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
         */
        public Future<PrimaryReplicaSyncer.ResyncTask> promoteReplicaToPrimary(IndexShard replica) throws IOException {
            PlainActionFuture<PrimaryReplicaSyncer.ResyncTask> fut = new PlainActionFuture<>();
            promoteReplicaToPrimary(replica,
                (shard, listener) -> primaryReplicaSyncer.resync(shard,
            promoteReplicaToPrimary(replica, (shard, listener) -> {
                computeReplicationTargets();
                primaryReplicaSyncer.resync(shard,
                    new ActionListener<PrimaryReplicaSyncer.ResyncTask>() {
                        @Override
                        public void onResponse(PrimaryReplicaSyncer.ResyncTask resyncTask) {
@@ -334,7 +338,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
                            listener.onFailure(e);
                            fut.onFailure(e);
                        }
                    }));
                    });
            });
            return fut;
        }

@@ -370,6 +375,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
            final boolean removed = replicas.remove(replica);
            if (removed) {
                updateAllocationIDsOnPrimary();
                computeReplicationTargets();
            }
            return removed;
        }
@@ -392,6 +398,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
            ESIndexLevelReplicationTestCase.this.recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds,
                routingTable);
            ESIndexLevelReplicationTestCase.this.startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable);
            computeReplicationTargets();
        }

        public synchronized DiscoveryNode getPrimaryNode() {
@@ -468,6 +475,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase

        public synchronized void reinitPrimaryShard() throws IOException {
            primary = reinitShard(primary);
            computeReplicationTargets();
        }

        public void syncGlobalCheckpoint() {
@@ -486,6 +494,24 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
                currentClusterStateVersion.incrementAndGet(),
                activeIds(), routingTable(Function.identity()), Collections.emptySet());
        }

        private synchronized void computeReplicationTargets() {
            this.replicationTargets = new ReplicationTargets(primary, replicas);
        }

        private synchronized ReplicationTargets getReplicationTargets() {
            return replicationTargets;
        }
    }

    static final class ReplicationTargets {
        final IndexShard primary;
        final List<IndexShard> replicas;

        ReplicationTargets(IndexShard primary, List<IndexShard> replicas) {
            this.primary = primary;
            this.replicas = Collections.unmodifiableList(replicas);
        }
    }

    protected abstract class ReplicationAction<Request extends ReplicationRequest<Request>,
@@ -493,13 +519,13 @@
        Response extends ReplicationResponse> {
        private final Request request;
        private ActionListener<Response> listener;
        private final ReplicationGroup replicationGroup;
        private final ReplicationTargets replicationTargets;
        private final String opType;

        protected ReplicationAction(Request request, ActionListener<Response> listener, ReplicationGroup group, String opType) {
            this.request = request;
            this.listener = listener;
            this.replicationGroup = group;
            this.replicationTargets = group.getReplicationTargets();
            this.opType = opType;
        }

@@ -523,7 +549,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
        }

        IndexShard getPrimaryShard() {
            return replicationGroup.primary;
            return replicationTargets.primary;
        }

        protected abstract PrimaryResult performOnPrimary(IndexShard primary, Request request) throws Exception;
@@ -534,7 +560,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase

            @Override
            public ShardRouting routingEntry() {
                return replicationGroup.primary.routingEntry();
                return getPrimaryShard().routingEntry();
            }

            @Override
@@ -544,37 +570,37 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase

            @Override
            public PrimaryResult perform(Request request) throws Exception {
                return performOnPrimary(replicationGroup.primary, request);
                return performOnPrimary(getPrimaryShard(), request);
            }

            @Override
            public void updateLocalCheckpointForShard(String allocationId, long checkpoint) {
                replicationGroup.getPrimary().updateLocalCheckpointForShard(allocationId, checkpoint);
                getPrimaryShard().updateLocalCheckpointForShard(allocationId, checkpoint);
            }

            @Override
            public void updateGlobalCheckpointForShard(String allocationId, long globalCheckpoint) {
                replicationGroup.getPrimary().updateGlobalCheckpointForShard(allocationId, globalCheckpoint);
                getPrimaryShard().updateGlobalCheckpointForShard(allocationId, globalCheckpoint);
            }

            @Override
            public long localCheckpoint() {
                return replicationGroup.getPrimary().getLocalCheckpoint();
                return getPrimaryShard().getLocalCheckpoint();
            }

            @Override
            public long globalCheckpoint() {
                return replicationGroup.getPrimary().getGlobalCheckpoint();
                return getPrimaryShard().getGlobalCheckpoint();
            }

            @Override
            public long maxSeqNoOfUpdatesOrDeletes() {
                return replicationGroup.getPrimary().getMaxSeqNoOfUpdatesOrDeletes();
                return getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes();
            }

            @Override
            public org.elasticsearch.index.shard.ReplicationGroup getReplicationGroup() {
                return replicationGroup.primary.getReplicationGroup();
                return getPrimaryShard().getReplicationGroup();
            }

        }
@@ -588,10 +614,10 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
                final long globalCheckpoint,
                final long maxSeqNoOfUpdatesOrDeletes,
                final ActionListener<ReplicationOperation.ReplicaResponse> listener) {
            IndexShard replica = replicationGroup.replicas.stream()
            IndexShard replica = replicationTargets.replicas.stream()
                .filter(s -> replicaRouting.isSameAllocation(s.routingEntry())).findFirst().get();
            replica.acquireReplicaOperationPermit(
                replicationGroup.primary.getPendingPrimaryTerm(),
                getPrimaryShard().getPendingPrimaryTerm(),
                globalCheckpoint,
                maxSeqNoOfUpdatesOrDeletes,
                new ActionListener<Releasable>() {
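The thread running through these hunks is that each ReplicationAction now captures an immutable ReplicationTargets snapshot (primary plus an unmodifiable replica list) when it is constructed, and the group recomputes the snapshot only at membership changes, so an in-flight action keeps talking to a consistent set of shards even if a replica is promoted or removed concurrently. A sketch of that snapshot pattern with a hypothetical Shard type (not the Elasticsearch classes):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class ReplicationTargetsDemo {
    static final class Shard {
        final String id;
        Shard(String id) { this.id = id; }
    }

    // Immutable view captured once per logical operation.
    static final class Targets {
        final Shard primary;
        final List<Shard> replicas;

        Targets(Shard primary, List<Shard> replicas) {
            this.primary = primary;
            this.replicas = Collections.unmodifiableList(replicas);
        }
    }

    public static void main(String[] args) {
        Targets snapshot = new Targets(new Shard("p0"), Arrays.asList(new Shard("r1"), new Shard("r2")));
        // every step of one operation routes through the same snapshot,
        // regardless of later changes to the live group
        System.out.println(snapshot.primary.id + " -> " + snapshot.replicas.size() + " replica(s)");
    }
}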
@@ -50,7 +50,6 @@ import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollResponse;
@@ -183,7 +182,6 @@ import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
@@ -382,8 +380,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
            case TEST:
                currentCluster = buildAndPutCluster(currentClusterScope, randomLong());
                break;
            default:
                fail("Unknown Scope: [" + currentClusterScope + "]");
        }
        cluster().beforeTest(random(), getPerTestTransportClientRatio());
        cluster().wipe(excludeTemplates());
@@ -402,7 +398,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     * Creates a randomized index template. This template is used to pass in randomized settings on a
     * per index basis. Allows to enable/disable the randomization for number of shards and replicas
     */
    public void randomIndexTemplate() throws IOException {
    public void randomIndexTemplate() {

        // TODO move settings for random directory etc here into the index based randomized settings.
        if (cluster().size() > 0) {
@@ -798,12 +794,12 @@ public abstract class ESIntegTestCase extends ESTestCase {

        if (numNodes > 0) {
            internalCluster().ensureAtLeastNumDataNodes(numNodes);
            getExcludeSettings(index, numNodes, builder);
            getExcludeSettings(numNodes, builder);
        }
        return client().admin().indices().prepareCreate(index).setSettings(builder.build());
    }

    private Settings.Builder getExcludeSettings(String index, int num, Settings.Builder builder) {
    private Settings.Builder getExcludeSettings(int num, Settings.Builder builder) {
        String exclude = String.join(",", internalCluster().allDataNodesButN(num));
        builder.put("index.routing.allocation.exclude._name", exclude);
        return builder;
@@ -896,7 +892,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
        internalCluster().ensureAtLeastNumDataNodes(n);
        Settings.Builder builder = Settings.builder();
        if (n > 0) {
            getExcludeSettings(index, n, builder);
            getExcludeSettings(n, builder);
        }
        Settings build = builder.build();
        if (!build.isEmpty()) {
@@ -1296,16 +1292,6 @@ public abstract class ESIntegTestCase extends ESTestCase {
        return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     * client().prepareGet(index, type, id).execute().actionGet();
     * </pre>
     */
    protected final GetResponse get(String index, String type, String id) {
        return client().prepareGet(index, type, id).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
@@ -1418,12 +1404,12 @@ public abstract class ESIntegTestCase extends ESTestCase {
    /**
     * Convenience method that forwards to {@link #indexRandom(boolean, List)}.
     */
    public void indexRandom(boolean forceRefresh, IndexRequestBuilder... builders) throws InterruptedException, ExecutionException {
    public void indexRandom(boolean forceRefresh, IndexRequestBuilder... builders) throws InterruptedException {
        indexRandom(forceRefresh, Arrays.asList(builders));
    }

    public void indexRandom(boolean forceRefresh, boolean dummyDocuments, IndexRequestBuilder... builders)
            throws InterruptedException, ExecutionException {
            throws InterruptedException {
        indexRandom(forceRefresh, dummyDocuments, Arrays.asList(builders));
    }

@@ -1442,7 +1428,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     * @param builders the documents to index.
     * @see #indexRandom(boolean, boolean, java.util.List)
     */
    public void indexRandom(boolean forceRefresh, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException {
    public void indexRandom(boolean forceRefresh, List<IndexRequestBuilder> builders) throws InterruptedException {
        indexRandom(forceRefresh, forceRefresh, builders);
    }

@@ -1459,7 +1445,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     * @param builders the documents to index.
     */
    public void indexRandom(boolean forceRefresh, boolean dummyDocuments, List<IndexRequestBuilder> builders)
            throws InterruptedException, ExecutionException {
            throws InterruptedException {
        indexRandom(forceRefresh, dummyDocuments, true, builders);
    }

@@ -1477,7 +1463,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
     * @param builders the documents to index.
     */
    public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean maybeFlush, List<IndexRequestBuilder> builders)
            throws InterruptedException, ExecutionException {
            throws InterruptedException {
        Random random = random();
        Map<String, Set<String>> indicesAndTypes = new HashMap<>();
        for (IndexRequestBuilder builder : builders) {
@@ -250,7 +250,6 @@ public abstract class ESTestCase extends LuceneTestCase {
    }

    protected final Logger logger = Loggers.getLogger(getClass());
    protected final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
    private ThreadContext threadContext;

    // -----------------------------------------------------------------
@@ -370,7 +369,7 @@ public abstract class ESTestCase extends LuceneTestCase {
        return "[" + name.substring(start + 1, end) + "] ";
    }

    private void ensureNoWarnings() throws IOException {
    private void ensureNoWarnings() {
        //Check that there are no unaccounted warning headers. These should be checked with {@link #assertWarnings(String...)} in the
        //appropriate test
        try {
@@ -510,7 +509,7 @@ public abstract class ESTestCase extends LuceneTestCase {
        checkIndexFailed = false;
    }

    public final void ensureCheckIndexPassed() throws Exception {
    public final void ensureCheckIndexPassed() {
        assertFalse("at least one shard failed CheckIndex", checkIndexFailed);
    }

@@ -878,7 +877,7 @@ public abstract class ESTestCase extends LuceneTestCase {
        return breakSupplier.getAsBoolean();
    }

    public static boolean terminate(ExecutorService... services) throws InterruptedException {
    public static boolean terminate(ExecutorService... services) {
        boolean terminated = true;
        for (ExecutorService service : services) {
            if (service != null) {
@@ -888,7 +887,7 @@ public abstract class ESTestCase extends LuceneTestCase {
        return terminated;
    }

    public static boolean terminate(ThreadPool threadPool) throws InterruptedException {
    public static boolean terminate(ThreadPool threadPool) {
        return ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
    }

@@ -943,23 +942,6 @@ public abstract class ESTestCase extends LuceneTestCase {
        return builder;
    }

    private static String threadName(Thread t) {
        return "Thread[" +
                "id=" + t.getId() +
                ", name=" + t.getName() +
                ", state=" + t.getState() +
                ", group=" + groupName(t.getThreadGroup()) +
                "]";
    }

    private static String groupName(ThreadGroup threadGroup) {
        if (threadGroup == null) {
            return "{null group}";
        } else {
            return threadGroup.getName();
        }
    }

    /**
     * Returns size random values
     */
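Dropping throws InterruptedException from the two terminate overloads is what lets every caller later in this diff (TransportClientNodesServiceTests, TransportActionProxyTests, AbstractSimpleTransportTestCase, MockWebServer, SamlAuthenticationIT) delete its try/catch wrapper. A generic sketch of the pattern that makes the checked exception unnecessary for callers (an assumed simplification using a plain ExecutorService rather than the Elasticsearch ThreadPool):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class TerminateDemo {
    // Swallow InterruptedException internally, restore the interrupt flag,
    // and report success as a boolean instead of forcing callers to catch.
    public static boolean terminate(ExecutorService service) {
        service.shutdown();
        try {
            return service.awaitTermination(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status for the caller
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(terminate(Executors.newFixedThreadPool(2))); // true
    }
}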
@@ -696,13 +696,6 @@ public final class InternalTestCluster extends TestCluster {
        return suffix;
    }

    /**
     * Returns the common node name prefix for this test cluster.
     */
    public String nodePrefix() {
        return nodePrefix;
    }

    @Override
    public synchronized Client client() {
        ensureOpen();
@@ -804,21 +797,6 @@ public final class InternalTestCluster extends TestCluster {
        return null; // can't happen
    }

    /**
     * Returns a random node that applies to the given predicate.
     * The predicate can filter nodes based on the nodes settings.
     * If all nodes are filtered out this method will return <code>null</code>
     */
    public synchronized Client client(final Predicate<Settings> filterPredicate) {
        ensureOpen();
        final NodeAndClient randomNodeAndClient =
                getRandomNodeAndClient(nodeAndClient -> filterPredicate.test(nodeAndClient.node.settings()));
        if (randomNodeAndClient != null) {
            return randomNodeAndClient.client(random);
        }
        return null;
    }

    @Override
    public synchronized void close() {
        if (this.open.compareAndSet(true, false)) {
@@ -914,7 +892,7 @@ public final class InternalTestCluster extends TestCluster {
            return clientWrapper.apply(transportClient);
        }

        void resetClient() throws IOException {
        void resetClient() {
            if (closed.get() == false) {
                Releasables.close(nodeClient, transportClient);
                nodeClient = null;
@@ -1388,7 +1366,7 @@ public final class InternalTestCluster extends TestCluster {
        });
    }

    private void randomlyResetClients() throws IOException {
    private void randomlyResetClients() {
        // only reset the clients on nightly tests, it causes heavy load...
        if (RandomizedTest.isNightly() && rarely(random)) {
            final Collection<NodeAndClient> nodesAndClients = nodes.values();
@@ -1654,20 +1632,6 @@ public final class InternalTestCluster extends TestCluster {
        }
    }

    /**
     * Restarts a random node in the cluster
     */
    public void restartRandomNode() throws Exception {
        restartRandomNode(EMPTY_CALLBACK);
    }

    /**
     * Restarts a random node in the cluster and calls the callback during restart.
     */
    public void restartRandomNode(RestartCallback callback) throws Exception {
        restartRandomNode(nc -> true, callback);
    }

    /**
     * Restarts a random data node in the cluster
     */
@@ -1718,13 +1682,6 @@ public final class InternalTestCluster extends TestCluster {
        fullRestart(EMPTY_CALLBACK);
    }

    /**
     * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time.
     */
    public void rollingRestart() throws Exception {
        rollingRestart(EMPTY_CALLBACK);
    }

    /**
     * Restarts all nodes in a rolling restart fashion ie. only restarts on node a time.
     */
@@ -186,11 +186,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
            assertNoPendingHandshakes(serviceB.getOriginalTransport());
        } finally {
            IOUtils.close(serviceA, serviceB, () -> {
                try {
                    terminate(threadPool);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
                terminate(threadPool);
            });
        }
    }
@@ -247,10 +247,7 @@ public class MockWebServer implements Closeable {
        latches.forEach(CountDownLatch::countDown);

        if (server.getExecutor() instanceof ExecutorService) {
            try {
                terminate((ExecutorService) server.getExecutor());
            } catch (InterruptedException e) {
            }
            terminate((ExecutorService) server.getExecutor());
        }
        server.stop(0);
    }
@@ -125,11 +125,7 @@ public class SamlAuthenticationIT extends ESRestTestCase {
    public static void shutdownHttpServer() {
        final Executor executor = httpServer.getExecutor();
        if (executor instanceof ExecutorService) {
            try {
                terminate((ExecutorService) executor);
            } catch (InterruptedException e) {
                // oh well
            }
            terminate((ExecutorService) executor);
        }
        httpServer.stop(0);
        httpServer = null;