Clean up "unused variable" warnings (#31876)
This change cleans up "unused variable" warnings. There are several cases where we most likely want to suppress the warnings (especially in the client documentation tests, where the snippets deliberately contain many unused variables). In many other cases the unused variables can simply be deleted.
This commit is contained in:
parent
48c169e065
commit
ba3ceeaccf
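To make the two cleanup strategies from the description concrete, here is a small hypothetical Java sketch; the class, method, and variable names below are illustrative only and do not appear anywhere in this commit's diff:

// Hypothetical illustration; not part of this commit.
public class UnusedVariableCleanupExample {

    // Case 1: the variable is deliberately unused (for example, a client
    // documentation snippet that exists only to show the assignment), so the
    // warning is suppressed rather than the line deleted.
    @SuppressWarnings("unused")
    public void documentationSnippet() {
        String response = fetchValue(); // shown to readers, never read again
    }

    // Case 2: the variable carries no information, so it is deleted and only
    // the call with the interesting side effect is kept.
    public void deletedVariable() {
        // Before: String unused = fetchValue();
        fetchValue();
    }

    private String fetchValue() {
        return "value";
    }
}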
@@ -114,6 +114,7 @@ import static org.hamcrest.Matchers.not;
  */
 public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
 
+    @SuppressWarnings("unused")
     public void testIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -278,6 +279,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testUpdate() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -546,6 +548,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testDelete() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -665,6 +668,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testBulk() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -767,6 +771,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testReindex() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -905,6 +910,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testReindexRethrottle() throws Exception {
         RestHighLevelClient client = highLevelClient();
         TaskId taskId = new TaskId("oTUltX4IQMOUUVeiohTt8A:124");
@@ -947,6 +953,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+    @SuppressWarnings("unused")
     public void testUpdateByQuery() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -1066,6 +1073,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testDeleteByQuery() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -1173,6 +1181,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testGet() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
@@ -1487,6 +1496,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testMultiGet() throws Exception {
         RestHighLevelClient client = highLevelClient();
 

@@ -192,6 +192,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testClusterGetSettings() throws IOException {
         RestHighLevelClient client = highLevelClient();
 
@@ -257,6 +258,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+    @SuppressWarnings("unused")
     public void testClusterHealth() throws IOException {
         RestHighLevelClient client = highLevelClient();
         client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT);

@@ -706,6 +706,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testGetFieldMapping() throws IOException, InterruptedException {
         RestHighLevelClient client = highLevelClient();
 
@@ -891,6 +892,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testRefreshIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -959,6 +961,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testFlushIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1035,6 +1038,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testSyncedFlushIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1308,6 +1312,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+    @SuppressWarnings("unused")
     public void testForceMergeIndex() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1381,6 +1386,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testClearCache() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1527,6 +1533,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testExistsAlias() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1590,6 +1597,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testUpdateAliases() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1915,6 +1923,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+    @SuppressWarnings("unused")
     public void testGetAlias() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -1985,6 +1994,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testIndexPutSettings() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -2315,6 +2325,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+    @SuppressWarnings("unused")
     public void testValidateQuery() throws IOException, InterruptedException {
         RestHighLevelClient client = highLevelClient();
 

@@ -143,6 +143,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testGetPipeline() throws IOException {
         RestHighLevelClient client = highLevelClient();
 

@@ -413,6 +413,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testSearchRequestHighlighting() throws IOException {
         RestHighLevelClient client = highLevelClient();
         {
@@ -831,6 +832,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
 
+
+    @SuppressWarnings("unused")
     public void testMultiSearchTemplateWithInlineScript() throws Exception {
         indexSearchTestData();
         RestHighLevelClient client = highLevelClient();

@@ -577,6 +577,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    @SuppressWarnings("unused")
     public void testSnapshotGetSnapshots() throws IOException {
         RestHighLevelClient client = highLevelClient();
 

@@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.equalTo;
  */
 public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase {
 
+    @SuppressWarnings("unused")
     public void testGetStoredScript() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
@@ -128,6 +129,7 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
 
     }
 
+    @SuppressWarnings("unused")
     public void testDeleteStoredScript() throws Exception {
         RestHighLevelClient client = highLevelClient();
 

@@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.notNullValue;
  */
 public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase {
 
+    @SuppressWarnings("unused")
     public void testListTasks() throws IOException {
         RestHighLevelClient client = highLevelClient();
         {
@@ -149,6 +150,7 @@ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void testCancelTasks() throws IOException {
         RestHighLevelClient client = highLevelClient();
         {
@@ -35,12 +35,12 @@ import java.io.InputStreamReader;
 import java.io.UncheckedIOException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Collections;
 
 public final class Grok {
 
@@ -184,6 +184,7 @@ public final class Grok {
             String namedPatternRef = groupMatch(NAME_GROUP, region, grokPattern);
             String subName = groupMatch(SUBNAME_GROUP, region, grokPattern);
             // TODO(tal): Support definitions
+            @SuppressWarnings("unused")
             String definition = groupMatch(DEFINITION_GROUP, region, grokPattern);
             String patternName = groupMatch(PATTERN_GROUP, region, grokPattern);
 

@@ -262,8 +262,6 @@ public class GrokTests extends ESTestCase {
     }
 
     public void testBooleanCaptures() {
-        Map<String, String> bank = new HashMap<>();
-
         String pattern = "%{WORD:name}=%{WORD:status:boolean}";
         Grok g = new Grok(basePatterns, pattern);
 
@@ -21,7 +21,6 @@ package org.elasticsearch.nio;
 
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
-import org.mockito.ArgumentCaptor;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -61,8 +60,6 @@ public class FlushOperationTests extends ESTestCase {
         ByteBuffer[] buffers = {ByteBuffer.allocate(10), ByteBuffer.allocate(15), ByteBuffer.allocate(3)};
         FlushOperation writeOp = new FlushOperation(buffers, listener);
 
-        ArgumentCaptor<ByteBuffer[]> buffersCaptor = ArgumentCaptor.forClass(ByteBuffer[].class);
-
         writeOp.incrementIndex(5);
         assertFalse(writeOp.isFullyFlushed());
         ByteBuffer[] byteBuffers = writeOp.getBuffersToWrite();
@@ -222,6 +222,7 @@ public class ObjectParserTests extends ESTestCase {
     public void testFailOnValueType() throws IOException {
         XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"numeric_value\" : false}");
         class TestStruct {
+            @SuppressWarnings("unused")
             public String test;
         }
         ObjectParser<TestStruct, Void> objectParser = new ObjectParser<>("foo");

@@ -54,6 +54,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
     private final CharArraySet keepWords;
     private static final String KEEP_WORDS_KEY = "keep_words";
     private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path";
+    @SuppressWarnings("unused")
     private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc
 
     // unsupported ancient option
@@ -53,7 +53,6 @@ import org.apache.lucene.analysis.pt.PortugueseStemFilter;
 import org.apache.lucene.analysis.ru.RussianLightStemFilter;
 import org.apache.lucene.analysis.snowball.SnowballFilter;
 import org.apache.lucene.analysis.sv.SwedishLightStemFilter;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
@@ -94,8 +93,6 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
 
     @Override
     public TokenStream create(TokenStream tokenStream) {
-        final Version indexVersion = indexSettings.getIndexVersionCreated();
-
         if ("arabic".equalsIgnoreCase(language)) {
             return new ArabicStemFilter(tokenStream);
         } else if ("armenian".equalsIgnoreCase(language)) {
@@ -295,7 +295,6 @@ public final class Locals {
         public final Class<?> clazz;
         public final boolean readonly;
         private final int slot;
-        private boolean used;
 
         public Variable(Location location, String name, Class<?> clazz, int slot, boolean readonly) {
             this.location = location;
@@ -91,7 +91,7 @@ public class ChildrenIT extends ParentChildTestCase {
             String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length];
             Control control = categoryToControl.get(category);
             if (control == null) {
-                categoryToControl.put(category, control = new Control(category));
+                categoryToControl.put(category, control = new Control());
             }
             control.articleIds.add(id);
         }
@@ -457,14 +457,8 @@ public class ChildrenIT extends ParentChildTestCase {
     }
 
     private static final class Control {
-
-        final String category;
         final Set<String> articleIds = new HashSet<>();
         final Set<String> commentIds = new HashSet<>();
         final Map<String, Set<String>> commenterToCommentId = new HashMap<>();
-
-        private Control(String category) {
-            this.category = category;
-        }
     }
 }

@@ -45,6 +45,7 @@ import java.util.Collections;
 
 public class ReindexDocumentationIT extends ESIntegTestCase {
 
+    @SuppressWarnings("unused")
     public void reindex() {
         Client client = client();
         // tag::reindex1
@@ -55,6 +56,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase {
         // end::reindex1
     }
 
+    @SuppressWarnings("unused")
     public void updateByQuery() {
         Client client = client();
         {
@@ -165,6 +167,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase {
         }
     }
 
+    @SuppressWarnings("unused")
     public void deleteByQuery() {
         Client client = client();
         // tag::delete-by-query-sync
@@ -86,6 +86,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
             this.collator = ref.collator;
         }
 
+        @Override
         public CollationFieldType clone() {
             return new CollationFieldType(this);
         }
@@ -239,7 +240,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
         private boolean numeric = false;
         private String variableTop = null;
         private boolean hiraganaQuaternaryMode = false;
         private String nullValue = Defaults.NULL_VALUE;
 
         public Builder(String name) {
             super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
@@ -29,7 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.node.Node;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.transport.TransportService;
 
@@ -48,7 +47,6 @@ public class TransportMainAction extends HandledTransportAction<MainRequest, Mai
     protected void doExecute(Task task, MainRequest request, ActionListener<MainResponse> listener) {
         ClusterState clusterState = clusterService.state();
         assert Node.NODE_NAME_SETTING.exists(settings);
-        final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false;
         listener.onResponse(
             new MainResponse(Node.NODE_NAME_SETTING.get(settings), Version.CURRENT, clusterState.getClusterName(),
                 clusterState.metaData().clusterUUID(), Build.CURRENT));
@@ -44,7 +44,6 @@ public abstract class SingleShardRequest<Request extends SingleShardRequest<Requ
     @Nullable
     protected String index;
     ShardId internalShardId;
-    private boolean threadedOperation = true;
 
     public SingleShardRequest() {
     }
@@ -634,7 +634,6 @@ public class MoreTypes {
         private final int modifiers;
         private final boolean synthetic;
         private final Class<? extends Member> memberType;
-        private final String memberKey;
 
         private MemberImpl(Member member) {
             this.declaringClass = member.getDeclaringClass();
@@ -642,7 +641,6 @@ public class MoreTypes {
             this.modifiers = member.getModifiers();
             this.synthetic = member.isSynthetic();
             this.memberType = memberType(member);
-            this.memberKey = memberKey(member);
         }
 
         @Override
@@ -759,11 +759,11 @@ public final class XMoreLikeThis {
 
                 if (queue.size() < limit) {
                     // there is still space in the queue
-                    queue.add(new ScoreTerm(word, topField, score, idf, docFreq, tf));
+                    queue.add(new ScoreTerm(word, topField, score));
                 } else {
                     ScoreTerm term = queue.top();
                     if (term.score < score) { // update the smallest in the queue in place and update the queue.
-                        term.update(word, topField, score, idf, docFreq, tf);
+                        term.update(word, topField, score);
                         queue.updateTop();
                     }
                 }
@@ -1026,30 +1026,20 @@ public final class XMoreLikeThis {
     }
 
     private static class ScoreTerm {
-        // only really need 1st 3 entries, other ones are for troubleshooting
         String word;
         String topField;
         float score;
-        float idf;
-        int docFreq;
-        int tf;
 
-        ScoreTerm(String word, String topField, float score, float idf, int docFreq, int tf) {
+        ScoreTerm(String word, String topField, float score) {
             this.word = word;
             this.topField = topField;
             this.score = score;
-            this.idf = idf;
-            this.docFreq = docFreq;
-            this.tf = tf;
         }
 
-        void update(String word, String topField, float score, float idf, int docFreq, int tf) {
+        void update(String word, String topField, float score) {
             this.word = word;
             this.topField = topField;
             this.score = score;
-            this.idf = idf;
-            this.docFreq = docFreq;
-            this.tf = tf;
         }
     }
 
@@ -30,8 +30,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -55,7 +53,6 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseField;
  * A field mapper for boolean fields.
  */
 public class BooleanFieldMapper extends FieldMapper {
-    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BooleanFieldMapper.class));
 
     public static final String CONTENT_TYPE = "boolean";
 
@@ -306,12 +306,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
                 // its an array of other possible values
                 if (token == XContentParser.Token.VALUE_NUMBER) {
                     double lon = context.parser().doubleValue();
-                    token = context.parser().nextToken();
+                    context.parser().nextToken();
                     double lat = context.parser().doubleValue();
                     token = context.parser().nextToken();
-                    Double alt = Double.NaN;
                     if (token == XContentParser.Token.VALUE_NUMBER) {
-                        alt = GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue());
+                        GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue());
                     } else if (token != XContentParser.Token.END_ARRAY) {
                         throw new ElasticsearchParseException("[{}] field type does not accept > 3 dimensions", CONTENT_TYPE);
                     }
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.index.mapper;
 
-import org.apache.logging.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.AnalyzerWrapper;
 import org.apache.lucene.analysis.CachingTokenFilter;
@@ -42,7 +41,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.collect.Iterators;
-import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -64,8 +62,6 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseTextField;
 /** A {@link FieldMapper} for full-text fields. */
 public class TextFieldMapper extends FieldMapper {
 
-    private static final Logger logger = ESLoggerFactory.getLogger(TextFieldMapper.class);
-
     public static final String CONTENT_TYPE = "text";
     private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
 
@@ -481,6 +477,7 @@ public class TextFieldMapper extends FieldMapper {
         }
     }
 
+        @Override
         public TextFieldType clone() {
             return new TextFieldType(this);
         }
@@ -159,10 +159,9 @@ public class AggregatorFactories {
         }
     }
 
-    public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory<?>[0],
+    public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory<?>[0],
             new ArrayList<PipelineAggregationBuilder>());
 
-    private AggregatorFactory<?> parent;
    private AggregatorFactory<?>[] factories;
    private List<PipelineAggregationBuilder> pipelineAggregatorFactories;
 
@@ -170,9 +169,7 @@ public class AggregatorFactories {
         return new Builder();
     }
 
-    private AggregatorFactories(AggregatorFactory<?> parent, AggregatorFactory<?>[] factories,
-            List<PipelineAggregationBuilder> pipelineAggregators) {
-        this.parent = parent;
+    private AggregatorFactories(AggregatorFactory<?>[] factories, List<PipelineAggregationBuilder> pipelineAggregators) {
         this.factories = factories;
         this.pipelineAggregatorFactories = pipelineAggregators;
     }
@@ -328,7 +325,7 @@ public class AggregatorFactories {
             for (int i = 0; i < aggregationBuilders.size(); i++) {
                 aggFactories[i] = aggregationBuilders.get(i).build(context, parent);
             }
-            return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators);
+            return new AggregatorFactories(aggFactories, orderedpipelineAggregators);
         }
 
         private List<PipelineAggregationBuilder> resolvePipelineAggregatorOrder(
@@ -144,6 +144,13 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder<Comp
         return this;
     }
 
+    /**
+     * @return the number of composite buckets. Defaults to {@code 10}.
+     */
+    public int size() {
+        return size;
+    }
+
     @Override
     protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent,
                                            AggregatorFactories.Builder subfactoriesBuilder) throws IOException {

@@ -721,7 +721,6 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
                     factory.storedFieldsContext =
                         StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), parser);
                 } else if (SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                    List<String> fieldDataFields = new ArrayList<>();
                     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                         FieldAndFormat ff = FieldAndFormat.fromXContent(parser);
                         factory.docValueField(ff.field, ff.format);
@@ -485,8 +485,6 @@ public class SplitIndexIT extends ESIntegTestCase {
         ImmutableOpenMap<String, DiscoveryNode> dataNodes = client().admin().cluster().prepareState().get().getState().nodes()
             .getDataNodes();
         assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2);
-        DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class);
-        String mergeNode = discoveryNodes[0].getName();
         // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
         // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
         // to the require._name below.
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.admin.indices.mapping.get;
 
+import com.carrotsearch.hppc.cursors.ObjectCursor;
 
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -119,20 +120,15 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase
     private static Map<String, Object> randomFieldMapping() {
         Map<String, Object> mappings = new HashMap<>();
         if (randomBoolean()) {
-            Map<String, Object> regularMapping = new HashMap<>();
-            regularMapping.put("type", randomBoolean() ? "text" : "keyword");
-            regularMapping.put("index", "analyzed");
-            regularMapping.put("analyzer", "english");
-            return regularMapping;
+            mappings.put("type", randomBoolean() ? "text" : "keyword");
+            mappings.put("index", "analyzed");
+            mappings.put("analyzer", "english");
         } else if (randomBoolean()) {
-            Map<String, Object> numberMapping = new HashMap<>();
-            numberMapping.put("type", randomFrom("integer", "float", "long", "double"));
-            numberMapping.put("index", Objects.toString(randomBoolean()));
-            return numberMapping;
+            mappings.put("type", randomFrom("integer", "float", "long", "double"));
+            mappings.put("index", Objects.toString(randomBoolean()));
         } else if (randomBoolean()) {
-            Map<String, Object> objMapping = new HashMap<>();
-            objMapping.put("type", "object");
-            objMapping.put("dynamic", "strict");
+            mappings.put("type", "object");
+            mappings.put("dynamic", "strict");
             Map<String, Object> properties = new HashMap<>();
             Map<String, Object> props1 = new HashMap<>();
             props1.put("type", randomFrom("text", "keyword"));
@@ -146,12 +142,10 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase
             props3.put("index", "false");
             prop2properties.put("subsubfield", props3);
             props2.put("properties", prop2properties);
-            objMapping.put("properties", properties);
-            return objMapping;
+            mappings.put("properties", properties);
         } else {
-            Map<String, Object> plainMapping = new HashMap<>();
-            plainMapping.put("type", "keyword");
-            return plainMapping;
+            mappings.put("type", "keyword");
         }
+        return mappings;
     }
 }
@@ -171,6 +171,7 @@ public class TransportReplicationActionTests extends ESTestCase {
         action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool);
     }
 
+    @Override
     @After
     public void tearDown() throws Exception {
         super.tearDown();
@@ -511,6 +512,7 @@ public class TransportReplicationActionTests extends ESTestCase {
                     ActionListener<TransportReplicationAction.PrimaryResult<Request, TestResponse>> actionListener,
                     TransportReplicationAction<Request, Request, TestResponse>.PrimaryShardReference primaryShardReference) {
                 return new NoopReplicationOperation(request, actionListener) {
+                    @Override
                     public void execute() throws Exception {
                         assertPhase(task, "primary");
                         assertFalse(executed.getAndSet(true));
@@ -567,6 +569,7 @@ public class TransportReplicationActionTests extends ESTestCase {
                     ActionListener<TransportReplicationAction.PrimaryResult<Request, TestResponse>> actionListener,
                     TransportReplicationAction<Request, Request, TestResponse>.PrimaryShardReference primaryShardReference) {
                 return new NoopReplicationOperation(request, actionListener) {
+                    @Override
                     public void execute() throws Exception {
                         assertPhase(task, "primary");
                         assertFalse(executed.getAndSet(true));
@@ -697,13 +700,6 @@ public class TransportReplicationActionTests extends ESTestCase {
             return null;
         }).when(shard).acquirePrimaryOperationPermit(any(), anyString(), anyObject());
 
-        AtomicBoolean closed = new AtomicBoolean();
-        Releasable releasable = () -> {
-            if (closed.compareAndSet(false, true) == false) {
-                fail("releasable is closed twice");
-            }
-        };
-
         TestAction action =
             new TestAction(Settings.EMPTY, "internal:testSeqNoIsSetOnPrimary", transportService, clusterService, shardStateAction,
                 threadPool) {
@@ -1112,8 +1108,6 @@ public class TransportReplicationActionTests extends ESTestCase {
 
     private class TestAction extends TransportReplicationAction<Request, Request, TestResponse> {
 
-        private final boolean withDocumentFailureOnPrimary;
-        private final boolean withDocumentFailureOnReplica;
 
         TestAction(Settings settings, String actionName, TransportService transportService,
                    ClusterService clusterService, ShardStateAction shardStateAction,
@@ -1122,8 +1116,6 @@ public class TransportReplicationActionTests extends ESTestCase {
                 shardStateAction,
                 new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
                 Request::new, Request::new, ThreadPool.Names.SAME);
-            this.withDocumentFailureOnPrimary = false;
-            this.withDocumentFailureOnReplica = false;
         }
 
         TestAction(Settings settings, String actionName, TransportService transportService,
@@ -1133,8 +1125,6 @@ public class TransportReplicationActionTests extends ESTestCase {
                 shardStateAction,
                 new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
                 Request::new, Request::new, ThreadPool.Names.SAME);
-            this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary;
-            this.withDocumentFailureOnReplica = withDocumentFailureOnReplica;
         }
 
         @Override
@@ -1173,7 +1163,6 @@ public class TransportReplicationActionTests extends ESTestCase {
             Index index = (Index) invocation.getArguments()[0];
             final ClusterState state = clusterService.state();
             if (state.metaData().hasIndex(index.getName())) {
-                final IndexMetaData indexSafe = state.metaData().getIndexSafe(index);
                 return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService);
             } else {
                 return null;

@@ -109,6 +109,7 @@ public class TransportWriteActionTests extends ESTestCase {
         clusterService = createClusterService(threadPool);
     }
 
+    @Override
     @After
     public void tearDown() throws Exception {
         super.tearDown();
@@ -430,7 +431,6 @@ public class TransportWriteActionTests extends ESTestCase {
             Index index = (Index) invocation.getArguments()[0];
             final ClusterState state = clusterService.state();
             if (state.metaData().hasIndex(index.getName())) {
-                final IndexMetaData indexSafe = state.metaData().getIndexSafe(index);
                 return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService);
             } else {
                 return null;
@@ -63,7 +63,6 @@ import java.util.stream.IntStream;
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
 import static org.elasticsearch.test.ClusterServiceUtils.setState;
-import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.CoreMatchers.startsWith;
 import static org.hamcrest.Matchers.containsString;
@@ -298,7 +297,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
             return null;
         }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class));
 
-        final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
+        new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool,
             Arrays.asList(
                 templates -> {
                     assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template")
@@ -415,42 +414,6 @@ public class TemplateUpgradeServiceTests extends ESTestCase {
         assertThat(finishInvocation.availablePermits(), equalTo(0));
     }
 
-    private static final int NODE_TEST_ITERS = 100;
-
-    private DiscoveryNodes randomNodes(int dataAndMasterNodes, int clientNodes) {
-        DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
-        String masterNodeId = null;
-        for (int i = 0; i < dataAndMasterNodes; i++) {
-            String id = randomAlphaOfLength(10) + "_" + i;
-            Set<DiscoveryNode.Role> roles;
-            if (i == 0) {
-                masterNodeId = id;
-                // The first node has to be master node
-                if (randomBoolean()) {
-                    roles = EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA);
-                } else {
-                    roles = EnumSet.of(DiscoveryNode.Role.MASTER);
-                }
-            } else {
-                if (randomBoolean()) {
-                    roles = EnumSet.of(DiscoveryNode.Role.DATA);
-                } else {
-                    roles = EnumSet.of(DiscoveryNode.Role.MASTER);
-                }
-            }
-            String node = "node_" + i;
-            builder.add(new DiscoveryNode(node, id, buildNewFakeTransportAddress(), emptyMap(), roles, randomVersion(random())));
-        }
-        builder.masterNodeId(masterNodeId); // Node 0 is always a master node
-
-        for (int i = 0; i < clientNodes; i++) {
-            String node = "client_" + i;
-            builder.add(new DiscoveryNode(node, randomAlphaOfLength(10) + "__" + i, buildNewFakeTransportAddress(), emptyMap(),
-                EnumSet.noneOf(DiscoveryNode.Role.class), randomVersion(random())));
-        }
-        return builder.build();
-    }
-
     public static MetaData randomMetaData(IndexTemplateMetaData... templates) {
         MetaData.Builder builder = MetaData.builder();
         for (IndexTemplateMetaData template : templates) {
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.cluster.routing;
 
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
 import org.elasticsearch.cluster.ClusterState;
@@ -27,6 +26,7 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.node.ResponseCollectorService;
@@ -530,7 +530,6 @@ public class OperationRoutingTests extends ESTestCase{
             indexNames[i] = "test" + i;
         }
         ClusterState state = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas(indexNames, numShards, numReplicas);
-        final int numRepeatedSearches = 4;
         OperationRouting opRouting = new OperationRouting(Settings.EMPTY,
                 new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         opRouting.setUseAdaptiveReplicaSelection(true);
@@ -20,6 +20,7 @@
 package org.elasticsearch.cluster.routing.allocation;
 
+import com.carrotsearch.hppc.cursors.ObjectCursor;
 
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest;
@@ -149,8 +150,6 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
             assertTrue(state.metaData().hasIndex(name));
         }
 
-        ClusterState previousState = state;
-
         logger.info("--> starting shards");
         state = cluster.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING));
         logger.info("--> starting replicas a random number of times");
@@ -91,10 +91,6 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase {
                 .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(2))
                 .build();
 
-        RoutingTable routingTable = RoutingTable.builder()
-                .addAsNew(metaData.index("test"))
-                .build();
-
         RoutingTable initialRoutingTable = RoutingTable.builder()
                 .addAsNew(metaData.index("test"))
                 .build();

@@ -86,7 +86,6 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase {
         int nodeIdCounter = 0;
         int atMostNodes = scaledRandomIntBetween(Math.max(1, maxNumReplicas), 15);
         final boolean frequentNodes = randomBoolean();
-        AllocationService.CommandsResult routingResult;
         for (int i = 0; i < numIters; i++) {
             logger.info("Start iteration [{}]", i);
             ClusterState.Builder stateBuilder = ClusterState.builder(clusterState);
@@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ESAllocationTestCase;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.routing.RoutingNode;
 import org.elasticsearch.cluster.routing.RoutingNodes;
@@ -35,9 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 
-import java.util.ArrayList;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;
 
 import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
@@ -238,7 +235,6 @@ public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase {
 
         logger.info("Adding " + (numberOfIndices / 2) + " nodes");
         DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
-        List<DiscoveryNode> nodes = new ArrayList<>();
         for (int i = 0; i < (numberOfIndices / 2); i++) {
             nodesBuilder.add(newNode("node" + i));
         }
@@ -666,7 +666,6 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase {
     }
 
     public void testFreeDiskPercentageAfterShardAssigned() {
-        RoutingNode rn = new RoutingNode("node1", newNode("node1"));
         DiskThresholdDecider decider = makeDecider(Settings.EMPTY);
 
         Map<String, DiskUsage> usages = new HashMap<>();
@@ -23,7 +23,6 @@ import org.elasticsearch.test.ESTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.lessThan;
 import static org.junit.Assert.assertThat;
 
 /**
  * Implements exponentially weighted moving averages (commonly abbreviated EWMA) for a single value.
@@ -41,19 +40,11 @@ public class ExponentiallyWeightedMovingAverageTests extends ESTestCase {
     }
 
     public void testInvalidAlpha() {
-        try {
-            ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(-0.5, 10);
-            fail("should have failed to create EWMA");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1"));
-        }
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(-0.5, 10));
+        assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1"));
 
-        try {
-            ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(1.5, 10);
-            fail("should have failed to create EWMA");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1"));
-        }
+        ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(1.5, 10));
+        assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1"));
     }
 
     public void testConvergingToValue() {
@@ -439,10 +439,7 @@ public class SettingsTests extends ESTestCase {
 
         Settings filteredSettings = builder.build().filter((k) -> false);
         assertEquals(0, filteredSettings.size());
-        for (String k : filteredSettings.keySet()) {
-            fail("no element");
 
-        }
         assertFalse(filteredSettings.keySet().contains("a.c"));
         assertFalse(filteredSettings.keySet().contains("a"));
         assertFalse(filteredSettings.keySet().contains("a.b"));
@@ -39,7 +39,8 @@ public class AsyncIOProcessorTests extends ESTestCase {
             protected void write(List<Tuple<Object, Consumer<Exception>>> candidates) throws IOException {
                 if (blockInternal) {
                     synchronized (this) {
-                        for (Tuple<Object, Consumer<Exception>> c :candidates) {
+                        // TODO: check why we need a loop, can't we just use received.addAndGet(candidates.size())
+                        for (int i = 0; i < candidates.size(); i++) {
                             received.incrementAndGet();
                         }
                     }
@@ -19,14 +19,14 @@
 
 package org.elasticsearch.common.util.iterable;
 
+import org.elasticsearch.test.ESTestCase;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.NoSuchElementException;
 
-import org.elasticsearch.test.ESTestCase;
-
 import static org.hamcrest.object.HasToString.hasToString;
 
 public class IterablesTests extends ESTestCase {
@@ -64,7 +64,7 @@ public class IterablesTests extends ESTestCase {
 
         Iterable<Integer> allInts = Iterables.flatten(list);
         int count = 0;
-        for(int x : allInts) {
+        for(@SuppressWarnings("unused") int x : allInts) {
             count++;
         }
         assertEquals(0, count);
@@ -73,14 +73,14 @@ public class IterablesTests extends ESTestCase {
 
         // changes to the outer list are not seen since flatten pre-caches outer list on init:
         count = 0;
-        for(int x : allInts) {
+        for(@SuppressWarnings("unused") int x : allInts) {
             count++;
         }
         assertEquals(0, count);
 
         // but changes to the original inner lists are seen:
         list.get(0).add(0);
-        for(int x : allInts) {
+        for(@SuppressWarnings("unused") int x : allInts) {
             count++;
         }
         assertEquals(1, count);
@@ -87,6 +87,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
 
     private boolean disableBeforeIndexDeletion;
 
+    @Override
     @Before
     public void setUp() throws Exception {
         super.setUp();
@@ -240,7 +241,6 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
 
     public ServiceDisruptionScheme addRandomDisruptionScheme() {
         // TODO: add partial partitions
-        NetworkDisruption p;
         final DisruptedLinks disruptedLinks;
         if (randomBoolean()) {
             disruptedLinks = TwoPartitions.random(random(), internalCluster().getNodeNames());
@@ -364,7 +364,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
     public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception {
         // don't use DEFAULT settings (which can cause node disconnects on a slow CI machine)
         configureCluster(Settings.EMPTY, 3, null, 1);
-        final String masterNode = internalCluster().startMasterOnlyNode();
+        internalCluster().startMasterOnlyNode();
         final String node_1 = internalCluster().startDataOnlyNode();
 
         logger.info("--> creating index [test] with one shard and on replica");
@@ -103,7 +103,6 @@ public class MetaDataStateFormatTests extends ESTestCase {
         final long id = addDummyFiles("foo-", dirs);
         Format format = new Format("foo-");
         DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
-        int version = between(0, Integer.MAX_VALUE/2);
         format.write(state, dirs);
         for (Path file : dirs) {
             Path[] list = content("*", file);
@@ -117,7 +116,6 @@ public class MetaDataStateFormatTests extends ESTestCase {
             DummyState read = format.read(NamedXContentRegistry.EMPTY, list[0]);
             assertThat(read, equalTo(state));
         }
-        final int version2 = between(version, Integer.MAX_VALUE);
         DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
         format.write(state2, dirs);
 
@@ -145,7 +143,6 @@ public class MetaDataStateFormatTests extends ESTestCase {
 
         Format format = new Format("foo-");
         DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
-        int version = between(0, Integer.MAX_VALUE/2);
         format.write(state, dirs);
         for (Path file : dirs) {
             Path[] list = content("*", file);
@@ -169,7 +166,6 @@ public class MetaDataStateFormatTests extends ESTestCase {
         final long id = addDummyFiles("foo-", dirs);
         Format format = new Format("foo-");
         DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
-        int version = between(0, Integer.MAX_VALUE/2);
         format.write(state, dirs);
         for (Path file : dirs) {
             Path[] list = content("*", file);
@@ -30,7 +30,6 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.join.BitSetProducer;
@@ -38,9 +37,9 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESTestCase;
@@ -89,7 +88,6 @@ public class BitSetFilterCacheTests extends ESTestCase {
 
         DirectoryReader reader = DirectoryReader.open(writer);
         reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
-        IndexSearcher searcher = new IndexSearcher(reader);
 
         BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, new BitsetFilterCache.Listener() {
             @Override
@@ -114,7 +112,6 @@ public class BitSetFilterCacheTests extends ESTestCase {
         reader.close();
         reader = DirectoryReader.open(writer);
         reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0));
-        searcher = new IndexSearcher(reader);
 
         assertThat(matchCount(filter, reader), equalTo(3));
 
@@ -19,42 +19,9 @@
 
 package org.elasticsearch.index.engine;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Base64;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-import java.util.Set;
-import java.util.concurrent.BrokenBarrierException;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.CyclicBarrier;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.BiFunction;
-import java.util.function.Function;
-import java.util.function.LongSupplier;
-import java.util.function.Supplier;
-import java.util.function.ToLongBiFunction;
-import java.util.stream.Collectors;
-import java.util.stream.LongStream;
-
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -162,6 +129,40 @@ import org.elasticsearch.test.IndexSettingsModule;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.BrokenBarrierException;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.CyclicBarrier;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.function.LongSupplier;
+import java.util.function.Supplier;
+import java.util.function.ToLongBiFunction;
+import java.util.stream.Collectors;
+import java.util.stream.LongStream;
+
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.shuffle;
 import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY;
@@ -2863,11 +2864,7 @@ public class InternalEngineTests extends EngineTestCase {
                 IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5),
                 config.getExternalRefreshListener(), config.getInternalRefreshListener(), null,
                 new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm::get, tombstoneDocSupplier());
-        try {
-            InternalEngine internalEngine = new InternalEngine(brokenConfig);
-            fail("translog belongs to a different engine");
-        } catch (EngineCreationFailureException ex) {
-        }
+        expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig));
 
         engine = createEngine(store, primaryTranslogDir); // and recover again!
         assertVisibleCount(engine, numDocs, false);
@@ -37,7 +37,7 @@ public class MapperTests extends ESTestCase {
     }
 
     public void testBuilderContextWithIndexSettingsAsNull() {
-        NullPointerException e = expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1)));
+        expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1)));
     }
 
 }
@@ -621,11 +621,6 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
     }
 
     public void testIndexPrefixIndexTypes() throws IOException {
-        QueryShardContext queryShardContext = indexService.newQueryShardContext(
-            randomInt(20), null, () -> {
-                throw new UnsupportedOperationException();
-            }, null);
-
         {
             String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                     .startObject("properties").startObject("field")
@@ -179,7 +179,6 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
     public void testCommonTermsQuery4() throws IOException {
         Query parsedQuery = parseQuery(commonTermsQuery("field", "text")).toQuery(createShardContext());
         assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
-        ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
     }
 
     public void testParseFailsWithMultipleFields() throws IOException {
@@ -371,7 +371,8 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
 
     private void assertGeoBoundingBoxQuery(String query) throws IOException {
         QueryShardContext shardContext = createShardContext();
-        Query parsedQuery = parseQuery(query).toQuery(shardContext);
+        // just check if we can parse the query
+        parseQuery(query).toQuery(shardContext);
     }
 
     public void testFromJson() throws IOException {
@@ -298,9 +298,10 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.DEFAULT);
}

private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit) throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
// TODO: what can we check?
private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit)
throws IOException {
parseQuery(query).toQuery(createShardContext());
// TODO: what can we check? See https://github.com/elastic/elasticsearch/issues/34043
}

public void testFromJson() throws IOException {

@@ -197,8 +197,8 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo

private void assertGeoPolygonQuery(String query) throws IOException {
QueryShardContext context = createShardContext();
Query parsedQuery = parseQuery(query).toQuery(context);
// todo LatLonPointInPolygon is package private, need a closeTo check on the query
parseQuery(query).toQuery(context);
// TODO LatLonPointInPolygon is package private, need a closeTo check on the query
// since some points can be computed from the geohash
}

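All three geo helpers keep the parseQuery(...).toQuery(...) call but stop binding its result: the call stays for its side effect (it throws if the query cannot be parsed), and the surviving comment records why no further assertion follows. A hedged sketch of the pattern, with a hypothetical parser in place of the query infrastructure:

    public class SideEffectCallSketch {
        static int parse(String query) {
            if (query.isEmpty()) {
                throw new IllegalArgumentException("empty query");
            }
            return query.length(); // a stand-in for the parsed result
        }

        static void assertParses(String query) {
            // Just check that parsing succeeds; the result itself is not inspected,
            // so binding it to a local would only trigger an unused-variable warning.
            parse(query);
        }

        public static void main(String[] args) {
            assertParses("geo_bounding_box: {...}");
            System.out.println("parsed without error");
        }
    }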
@@ -34,10 +34,10 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RandomApproximationQuery;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
@@ -46,6 +46,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.FilterScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
import org.elasticsearch.common.lucene.search.function.LeafScoreFunction;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
@@ -71,7 +72,6 @@ import java.util.concurrent.ExecutionException;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.FilterScoreFunction;

public class FunctionScoreTests extends ESTestCase {

@@ -709,13 +709,11 @@ public class FunctionScoreTests extends ESTestCase {

public void testExplanationAndScoreEqualsEvenIfNoFunctionMatches() throws IOException {
IndexSearcher localSearcher = newSearcher(reader);
ScoreMode scoreMode = randomFrom(new
ScoreMode[]{ScoreMode.SUM, ScoreMode.AVG, ScoreMode.FIRST, ScoreMode.MIN, ScoreMode.MAX, ScoreMode.MULTIPLY});
CombineFunction combineFunction = randomFrom(new
CombineFunction[]{CombineFunction.SUM, CombineFunction.AVG, CombineFunction.MIN, CombineFunction.MAX,
CombineFunction.MULTIPLY, CombineFunction.REPLACE});

// check for document that has no macthing function
// check for document that has no matching function
FunctionScoreQuery query = new FunctionScoreQuery(new TermQuery(new Term(FIELD, "out")),
new FilterScoreFunction(new TermQuery(new Term("_uid", "2")), new WeightFactorFunction(10)),
combineFunction, Float.NEGATIVE_INFINITY, Float.MAX_VALUE);

@@ -485,7 +485,6 @@ public class IndexShardTests extends IndexShardTestCase {
final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED));

final int maxSeqNo = result.maxSeqNo;
final boolean gap = result.gap;

// promote the replica
final ShardRouting replicaRouting = indexShard.routingEntry();
@@ -1780,7 +1779,7 @@ public class IndexShardTests extends IndexShardTestCase {
public void testRecoverFromStoreWithNoOps() throws IOException {
final IndexShard shard = newStartedShard(true);
indexDoc(shard, "_doc", "0");
Engine.IndexResult test = indexDoc(shard, "_doc", "1");
indexDoc(shard, "_doc", "1");
// start a replica shard and index the second doc
final IndexShard otherShard = newStartedShard(false);
updateMappings(otherShard, shard.indexSettings().getIndexMetaData());
@@ -2877,12 +2876,10 @@ public class IndexShardTests extends IndexShardTestCase {
class Result {
private final int localCheckpoint;
private final int maxSeqNo;
private final boolean gap;

Result(final int localCheckpoint, final int maxSeqNo, final boolean gap) {
Result(final int localCheckpoint, final int maxSeqNo) {
this.localCheckpoint = localCheckpoint;
this.maxSeqNo = maxSeqNo;
this.gap = gap;
}
}

@@ -2921,7 +2918,7 @@ public class IndexShardTests extends IndexShardTestCase {
}
assert localCheckpoint == indexShard.getLocalCheckpoint();
assert !gap || (localCheckpoint != max);
return new Result(localCheckpoint, max, gap);
return new Result(localCheckpoint, max);
}

/** A dummy repository for testing which just needs restore overridden */

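Deleting Result.gap shows how one unused field ripples outward: the field, its assignment, the constructor parameter, and the new Result(...) call change together, and the reading side (final boolean gap = result.gap) disappears with it, while the producer keeps its local gap because the assert still reads it. A before/after sketch reduced to its essentials (names mirror the hunk, the class is hypothetical):

    public class NarrowingSketch {
        // Before: Result(localCheckpoint, maxSeqNo, gap) -- 'gap' was stored but never read.
        // After: field, parameter, and assignment are removed in one step.
        static class Result {
            final int localCheckpoint;
            final int maxSeqNo;

            Result(int localCheckpoint, int maxSeqNo) {
                this.localCheckpoint = localCheckpoint;
                this.maxSeqNo = maxSeqNo;
            }
        }

        public static void main(String[] args) {
            int localCheckpoint = 7;
            int max = 9;
            boolean gap = true;
            assert !gap || (localCheckpoint != max); // 'gap' is still read here, so it stays a local
            Result r = new Result(localCheckpoint, max); // ...but it is no longer shipped into Result
            System.out.println(r.localCheckpoint + "/" + r.maxSeqNo);
        }
    }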
@@ -39,11 +39,11 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.Uid;
@@ -142,7 +142,6 @@ public class StoreRecoveryTests extends ESTestCase {
} else {
indexSort = null;
}
int id = 0;
IndexWriterConfig iwc = newIndexWriterConfig()
.setMergePolicy(NoMergePolicy.INSTANCE)
.setOpenMode(IndexWriterConfig.OpenMode.CREATE);

@@ -364,10 +364,8 @@ public class StoreTests extends ESTestCase {

}

final long luceneChecksum;
try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
assertEquals(luceneFileLength, indexInput.length());
luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
}

dir.close();
@@ -663,7 +661,6 @@ public class StoreTests extends ESTestCase {
if (randomBoolean()) {
store.cleanupAndVerify("test", firstMeta);
String[] strings = store.directory().listAll();
int numChecksums = 0;
int numNotFound = 0;
for (String file : strings) {
if (file.startsWith("extra")) {
@@ -679,7 +676,6 @@ public class StoreTests extends ESTestCase {
} else {
store.cleanupAndVerify("test", secondMeta);
String[] strings = store.directory().listAll();
int numChecksums = 0;
int numNotFound = 0;
for (String file : strings) {
if (file.startsWith("extra")) {

@@ -639,12 +639,8 @@ public class TranslogTests extends ESTestCase {
assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1))));
translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[]{1}));
translog.close();
try {
Translog.Snapshot snapshot = translog.newSnapshot();
fail("translog is closed");
} catch (AlreadyClosedException ex) {
assertEquals(ex.getMessage(), "translog is already closed");
}
AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot());
assertEquals(ex.getMessage(), "translog is already closed");
}

public void testSnapshotFromMinGen() throws Exception {
@@ -845,7 +841,7 @@ public class TranslogTests extends ESTestCase {

try (Translog translog = openTranslog(config, uuid)) {
try (Translog.Snapshot snapshot = translog.newSnapshot()) {
for (Location loc : locations) {
for (int i = 0; i < locations.size(); i++) {
snapshot.next();
}
}
@@ -871,7 +867,7 @@ public class TranslogTests extends ESTestCase {

AtomicInteger truncations = new AtomicInteger(0);
try (Translog.Snapshot snap = translog.newSnapshot()) {
for (Translog.Location location : locations) {
for (int i = 0; i < locations.size(); i++) {
try {
assertNotNull(snap.next());
} catch (EOFException e) {
@@ -2378,6 +2374,7 @@ public class TranslogTests extends ESTestCase {
}


@Override
public int write(ByteBuffer src) throws IOException {
if (fail.fail()) {
if (partialWrite) {
@@ -2486,14 +2483,9 @@ public class TranslogTests extends ESTestCase {
// don't copy the new file
Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog"));

try {
Translog tlog = new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get);
fail("file already exists?");
} catch (TranslogException ex) {
// all is well
assertEquals(ex.getMessage(), "failed to create new translog file");
assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
}
TranslogException ex = expectThrows(TranslogException.class, () -> new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get));
assertEquals(ex.getMessage(), "failed to create new translog file");
assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
}

public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException {
@@ -2521,14 +2513,10 @@ public class TranslogTests extends ESTestCase {
tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
}

try {
Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get);
fail("file already exists?");
} catch (TranslogException ex) {
// all is well
assertEquals(ex.getMessage(), "failed to create new translog file");
assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
}
TranslogException ex = expectThrows(TranslogException.class,
() -> new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get));
assertEquals(ex.getMessage(), "failed to create new translog file");
assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class);
}

/**

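The two snapshot loops above iterate purely to advance the snapshot; the loop variable was never read, so the commit switches to a count-based loop. A minimal sketch of the idiom, with a plain Iterator standing in for Translog.Snapshot:

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class DrainSketch {
        // Advance an iterator-like source once per expected location. The element
        // itself is never examined, so a foreach variable would sit unused; an
        // index-based loop makes "advance N times" the explicit intent.
        static void drain(Iterator<?> snapshot, List<?> locations) {
            for (int i = 0; i < locations.size(); i++) {
                snapshot.next();
            }
        }

        public static void main(String[] args) {
            List<String> locations = Arrays.asList("a", "b", "c");
            drain(locations.iterator(), locations);
            System.out.println("drained " + locations.size() + " entries");
        }
    }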
@@ -295,7 +295,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase {
.put("indices.memory.index_buffer_size", "4mb").build());
IndexShard shard0 = test.getShard(0);
IndexShard shard1 = test.getShard(1);
IndexShard shard2 = test.getShard(2);
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard0);
controller.simulateIndexing(shard0);

@@ -324,7 +324,6 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC
* Mock for {@link IndexShard}
*/
protected class MockIndexShard implements IndicesClusterStateService.Shard {
private volatile long clusterStateVersion;
private volatile ShardRouting shardRouting;
private volatile RecoveryState recoveryState;
private volatile Set<String> inSyncAllocationIds;
@@ -372,7 +371,6 @@ public abstract class AbstractIndicesClusterStateServiceTestC
this.shardRouting = shardRouting;
if (shardRouting.primary()) {
term = newPrimaryTerm;
this.clusterStateVersion = applyingClusterStateVersion;
this.inSyncAllocationIds = inSyncAllocationIds;
this.routingTable = routingTable;
}

@@ -74,7 +74,6 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();

int recCount = randomIntBetween(200, 600);
int numberOfTypes = randomIntBetween(1, 5);
List<IndexRequestBuilder> indexRequests = new ArrayList<>();
for (int rec = 0; rec < recCount; rec++) {
String type = "type";

@@ -57,7 +57,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.test.transport.StubbableTransport;
@@ -550,7 +549,6 @@ public class IndexRecoveryIT extends ESIntegTestCase {
final Settings nodeSettings = Settings.builder()
.put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms")
.put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s")
.put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again
.build();
// start a master node
internalCluster().startNode(nodeSettings);

@@ -94,7 +94,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
}

public void testIndexCleanup() throws Exception {
final String masterNode = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));
internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false));
final String node_1 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
final String node_2 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false));
logger.info("--> creating index [test] with one shard and on replica");
@@ -325,7 +325,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
}

public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception {
final String masterNode = internalCluster().startMasterOnlyNode();
internalCluster().startMasterOnlyNode();
final List<String> nodes = internalCluster().startDataOnlyNodes(4);

final String node1 = nodes.get(0);

@@ -31,16 +31,16 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams;
import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment;
import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask;
import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams;
import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor;
import org.junit.After;
import org.junit.Before;

@@ -334,13 +334,11 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
private final PersistentTaskParams params;
private final AllocatedPersistentTask task;
private final PersistentTaskState state;
private final PersistentTasksExecutor<?> holder;

Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state, PersistentTasksExecutor<?> holder) {
Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state) {
this.params = params;
this.task = task;
this.state = state;
this.holder = holder;
}
}

@@ -356,7 +354,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase {
final PersistentTaskState state,
final AllocatedPersistentTask task,
final PersistentTasksExecutor<Params> executor) {
executions.add(new Execution(params, task, state, executor));
executions.add(new Execution(params, task, state));
}

public Execution get(int i) {

@@ -148,7 +148,7 @@ public class FullRollingRestartIT extends ESIntegTestCase {
}
internalCluster().restartRandomDataNode();
ensureGreen();
ClusterState afterState = client().admin().cluster().prepareState().get().getState();
client().admin().cluster().prepareState().get().getState();

recoveryResponse = client().admin().indices().prepareRecoveries("test").get();
for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) {

@@ -54,7 +54,6 @@ public class RepositoriesServiceIT extends ESIntegTestCase {
final Client client = client();
final RepositoriesService repositoriesService =
cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next();
final Settings settings = cluster.getDefaultSettings();

final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath());

@@ -212,7 +212,6 @@ public class RestControllerTests extends ESTestCase {
};
final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper, null,
circuitBreakerService, usageService);
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, Optional.of(handler));
assertTrue(wrapperCalled.get());
assertFalse(handlerCalled.get());

@@ -40,7 +40,6 @@ import java.util.stream.Collectors;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;

public class RestHttpResponseHeadersTests extends ESTestCase {

@@ -114,7 +113,6 @@ public class RestHttpResponseHeadersTests extends ESTestCase {

// Send the request and verify the response status code
FakeRestChannel restChannel = new FakeRestChannel(restRequest, false, 1);
NodeClient client = mock(NodeClient.class);
restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY));
assertThat(restChannel.capturedResponse().status().getStatus(), is(405));

@@ -35,7 +35,6 @@ public class StoredScriptSourceTests extends AbstractSerializingTestCase<StoredS

@Override
protected StoredScriptSource createTestInstance() {
String lang = randomAlphaOfLengthBetween(1, 20);
XContentType xContentType = randomFrom(XContentType.JSON, XContentType.YAML);
try {
XContentBuilder template = XContentBuilder.builder(xContentType.xContent());

@@ -33,6 +33,7 @@ import org.apache.lucene.store.Directory;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService.TooManyBucketsException;

import java.io.IOException;
import java.util.Arrays;
@@ -40,8 +41,6 @@ import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;

import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.TooManyBucketsException;

public class DateHistogramAggregatorTests extends AggregatorTestCase {

private static final String DATE_FIELD = "date";
@@ -346,19 +345,19 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase {
"2017-01-01T00:00:00.000Z"
);

TooManyBucketsException exc = expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps,
expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps,
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD),
histogram -> {}, 2));

exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD),
histogram -> {}, 2));

exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L),
histogram -> {}, 100));

exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps,
aggregation ->
aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5))
.field(DATE_FIELD)

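The import hunks above also swap a static import of the nested TooManyBucketsException for a plain import; Java allows either spelling for a static nested type, and the plain form is what this file moves to. A compilable sketch of the two forms, using illustrative names rather than the real classes:

    // Plain import of a nested type:
    //     import org.example.MultiBucketConsumerService.TooManyBucketsException;
    // Static import of the same nested type (also legal, since nested classes are static members):
    //     import static org.example.MultiBucketConsumerService.TooManyBucketsException;
    // Both let code refer to the short name TooManyBucketsException.

    public class NestedImportSketch {
        static class MultiBucketConsumerService {
            static class TooManyBucketsException extends RuntimeException {
                TooManyBucketsException(String message) {
                    super(message);
                }
            }
        }

        public static void main(String[] args) {
            try {
                throw new MultiBucketConsumerService.TooManyBucketsException("too many buckets");
            } catch (MultiBucketConsumerService.TooManyBucketsException e) {
                System.out.println("caught: " + e.getMessage());
            }
        }
    }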
@@ -439,8 +439,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
}

public void testBadSigmaAsSubAgg() throws Exception {
try {
SearchResponse response = client()
Exception ex = expectThrows(Exception.class, () -> client()
.prepareSearch("idx")
.addAggregation(
terms("terms")
@@ -451,21 +450,18 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.extendedBounds(minRandomValue, maxRandomValue)
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum")
.sigma(-1.0))).execute().actionGet();
fail("Illegal sigma was provided but no exception was thrown.");
} catch (Exception e) {
Throwable cause = ExceptionsHelper.unwrapCause(e);
if (cause == null) {
throw e;
} else if (cause instanceof SearchPhaseExecutionException) {
SearchPhaseExecutionException spee = (SearchPhaseExecutionException) e;
Throwable rootCause = spee.getRootCause();
if (!(rootCause instanceof IllegalArgumentException)) {
throw e;
}
} else if (!(cause instanceof IllegalArgumentException)) {
throw e;
.sigma(-1.0))).execute().actionGet());
Throwable cause = ExceptionsHelper.unwrapCause(ex);
if (cause == null) {
throw ex;
} else if (cause instanceof SearchPhaseExecutionException) {
SearchPhaseExecutionException spee = (SearchPhaseExecutionException) ex;
Throwable rootCause = spee.getRootCause();
if (!(rootCause instanceof IllegalArgumentException)) {
throw ex;
}
} else if (!(cause instanceof IllegalArgumentException)) {
throw ex;
}
}

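The rewrite keeps the cause-unwrapping checks but moves the throwing call into a lambda, so no half-used response variable is left behind. A minimal sketch of asserting on a wrapped root cause (the unwrap helper is a stand-in for ExceptionsHelper.unwrapCause, and JUnit 4.13's assertThrows plays the role of expectThrows):

    import static org.junit.Assert.assertThrows;
    import static org.junit.Assert.assertTrue;

    public class WrappedCauseSketch {
        // Stand-in for ExceptionsHelper.unwrapCause: walk to the innermost cause.
        static Throwable unwrap(Throwable t) {
            Throwable cur = t;
            while (cur.getCause() != null && cur.getCause() != cur) {
                cur = cur.getCause();
            }
            return cur;
        }

        public void testIllegalSigmaIsReported() {
            Exception ex = assertThrows(Exception.class,
                () -> { throw new RuntimeException(new IllegalArgumentException("sigma must be non-negative")); });
            assertTrue(unwrap(ex) instanceof IllegalArgumentException);
        }
    }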
@@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.collect.EvictingQueue;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -411,7 +412,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts","_count")
.window(windowSize)
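
Every MovAvgIT hunk makes the same small fix: interval is already a long, so interval * (numBuckets - 1) is evaluated in 64-bit arithmetic and the (long) cast was a no-op. A short demonstration of the promotion rule (the values are arbitrary):

    public class PromotionSketch {
        public static void main(String[] args) {
            long interval = 86_400_000L; // one day in ms
            int numBuckets = 60;

            // Binary numeric promotion: long * int is computed as long * long,
            // so the explicit cast adds nothing and cannot change the result.
            long upperBound = interval * (numBuckets - 1);
            long upperBoundCast = (long) (interval * (numBuckets - 1));

            System.out.println(upperBound == upperBoundCast); // true
        }
    }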
@@ -459,7 +460,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(windowSize)
@@ -507,7 +508,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(windowSize)
@@ -555,7 +556,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(windowSize)
@@ -604,7 +605,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(windowSize)
@@ -708,7 +709,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(randomMetric("the_metric", VALUE_FIELD))
.subAggregation(movingAvg("movavg_counts", "the_metric")
.window(0)
@@ -746,7 +747,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(randomMetric("the_metric", VALUE_FIELD))
.subAggregation(movingAvg("movavg_counts", "_count")
.window(-10)
@@ -810,7 +811,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(randomMetric("the_metric", VALUE_FIELD))
.subAggregation(movingAvg("movavg_counts", "the_metric")
.window(windowSize)
@@ -831,7 +832,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(randomMetric("the_metric", VALUE_FIELD))
.subAggregation(movingAvg("movavg_counts", "the_metric")
.window(windowSize)
@@ -847,12 +848,11 @@ public class MovAvgIT extends ESIntegTestCase {
}

public void testHoltWintersNotEnoughData() {
try {
SearchResponse response = client()
.prepareSearch("idx").setTypes("type")
Client client = client();
expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(10)
@@ -864,11 +864,7 @@ public class MovAvgIT extends ESIntegTestCase {
.modelBuilder(new HoltWintersModel.HoltWintersModelBuilder()
.alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType))
.gapPolicy(gapPolicy))
).execute().actionGet();
} catch (SearchPhaseExecutionException e) {
// All good
}

).execute().actionGet());
}

public void testTwoMovAvgsWithPredictions() {
@@ -982,23 +978,19 @@ public class MovAvgIT extends ESIntegTestCase {
}
}

@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/34046")
public void testBadModelParams() {
try {
SearchResponse response = client()
expectThrows(SearchPhaseExecutionException.class, () -> client()
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(10)
.modelBuilder(randomModelBuilder(100))
.gapPolicy(gapPolicy))
).execute().actionGet();
} catch (SearchPhaseExecutionException e) {
// All good
}

).execute().actionGet());
}

public void testHoltWintersMinimization() {
@@ -1006,7 +998,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(windowSize)
@@ -1092,7 +1084,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(numBuckets)
@@ -1146,7 +1138,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(numBuckets)
@@ -1164,7 +1156,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(numBuckets)
@@ -1194,7 +1186,7 @@ public class MovAvgIT extends ESIntegTestCase {
.prepareSearch("idx").setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, (long) (interval * (numBuckets - 1)))
.extendedBounds(0L, interval * (numBuckets - 1))
.subAggregation(metric)
.subAggregation(movingAvg("movavg_counts", "_count")
.window(numBuckets)

@@ -23,7 +23,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
@@ -213,7 +212,6 @@ public class MoreLikeThisIT extends ESIntegTestCase {
}

public void testMoreLikeThisIssue2197() throws Exception {
Client client = client();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar")
.startObject("properties")
.endObject()

@@ -1744,7 +1744,7 @@ public class SearchQueryIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));

// When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation
Exception e = expectThrows(SearchPhaseExecutionException.class, () ->
expectThrows(SearchPhaseExecutionException.class, () ->
client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00"))
.get());

@@ -201,12 +201,10 @@ public class QueryRescorerBuilderTests extends ESTestCase {
rescoreBuilder.setRescoreQueryWeight(randomFloat());
rescoreBuilder.setScoreMode(QueryRescoreMode.Max);

QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.buildContext(mockShardContext);
QueryRescorerBuilder rescoreRewritten = rescoreBuilder.rewrite(mockShardContext);
assertEquals(rescoreRewritten.getQueryWeight(), rescoreBuilder.getQueryWeight(), 0.01f);
assertEquals(rescoreRewritten.getRescoreQueryWeight(), rescoreBuilder.getRescoreQueryWeight(), 0.01f);
assertEquals(rescoreRewritten.getScoreMode(), rescoreBuilder.getScoreMode());

}

/**

@@ -972,11 +972,8 @@ public class SuggestSearchIT extends ESIntegTestCase {
assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options

suggest.size(1);
long start = System.currentTimeMillis();
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
long total = System.currentTimeMillis() - start;
assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
// assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
}

public void testSuggestWithFieldAlias() throws Exception {
@@ -1168,7 +1165,7 @@ public class SuggestSearchIT extends ESIntegTestCase {
.endObject()
.endObject());

PhraseSuggestionBuilder in = suggest.collateQuery(filterStr);
suggest.collateQuery(filterStr);
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
fail("Post filter error has been swallowed");
@@ -1186,7 +1183,6 @@ public class SuggestSearchIT extends ESIntegTestCase {
.endObject());


PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams);
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
fail("Malformed query (lack of additional params) should fail");

@@ -45,7 +45,6 @@ import static org.hamcrest.Matchers.instanceOf;
public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase<CompletionSuggestionBuilder> {

private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] { CompletionSuggestionBuilder.CONTEXTS_FIELD.getPreferredName() };
private static final Map<String, List<? extends ToXContent>> contextMap = new HashMap<>();
private static String categoryContextName;
private static String geoQueryContextName;
private static List<ContextMapping<?>> contextMappings = new ArrayList<>();

@@ -2819,7 +2819,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas

Predicate<String> isRestorableIndex = index -> corruptedIndex.getName().equals(index) == false;

RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap")
.setIndices(nbDocsPerIndex.keySet().stream().filter(isRestorableIndex).toArray(String[]::new))
.setRestoreGlobalState(randomBoolean())
.setWaitForCompletion(true)

@@ -55,7 +55,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase {
}
}
logger.info("pre node threads are {}", preNodeStartThreadNames);
String node = internalCluster().startNode();
internalCluster().startNode();
logger.info("do some indexing, flushing, optimize, and searches");
int numDocs = randomIntBetween(2, 100);
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];

@@ -61,7 +61,6 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase {
}

public void testWriteThreadPoolsMaxSize() throws InterruptedException {
final String name = Names.WRITE;
final int maxSize = 1 + EsExecutors.numberOfProcessors(Settings.EMPTY);
final int tooBig = randomIntBetween(1 + maxSize, Integer.MAX_VALUE);

@@ -223,6 +223,7 @@ public class TcpTransportTests extends ESTestCase {

StreamInput streamIn = reference.streamInput();
streamIn.skip(TcpHeader.MARKER_BYTES_SIZE);
@SuppressWarnings("unused")
int len = streamIn.readInt();
long requestId = streamIn.readLong();
assertEquals(42, requestId);

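Here the read cannot simply be deleted: streamIn.readInt() must run to move the stream past the length field before requestId is read, so the commit keeps the statement and silences the warning with a targeted @SuppressWarnings("unused") rather than a class-wide one. A sketch of the trade-off, with DataInputStream standing in for StreamInput:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class SideEffectReadSketch {
        public static void main(String[] args) throws IOException {
            byte[] frame = new byte[] {0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 42}; // len=9, requestId=42
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(frame));

            // The value is irrelevant, but the read itself advances the stream;
            // the annotation documents that the variable is deliberately unused.
            @SuppressWarnings("unused")
            int len = in.readInt();

            long requestId = in.readLong();
            System.out.println(requestId); // 42
        }
    }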
@@ -19,19 +19,6 @@

package org.elasticsearch.update;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
@@ -56,6 +43,19 @@ import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
@@ -586,15 +586,13 @@ public class UpdateIT extends ESIntegTestCase {
final class UpdateThread extends Thread {
final Map<Integer,Integer> failedMap = new HashMap<>();
final int numberOfIds;
final int updatesPerId;
final int maxUpdateRequests = numberOfIdsPerThread*numberOfUpdatesPerId;
final int maxDeleteRequests = numberOfIdsPerThread*numberOfUpdatesPerId;
private final Semaphore updateRequestsOutstanding = new Semaphore(maxUpdateRequests);
private final Semaphore deleteRequestsOutstanding = new Semaphore(maxDeleteRequests);

UpdateThread(int numberOfIds, int updatesPerId) {
UpdateThread(int numberOfIds) {
this.numberOfIds = numberOfIds;
this.updatesPerId = updatesPerId;
}

final class UpdateListener implements ActionListener<UpdateResponse> {
@@ -725,7 +723,7 @@ public class UpdateIT extends ESIntegTestCase {
final List<UpdateThread> threads = new ArrayList<>();

for (int i = 0; i < numberOfThreads; i++) {
UpdateThread ut = new UpdateThread(numberOfIdsPerThread, numberOfUpdatesPerId);
UpdateThread ut = new UpdateThread(numberOfIdsPerThread);
ut.start();
threads.add(ut);
}
@@ -749,7 +747,7 @@ public class UpdateIT extends ESIntegTestCase {
//This means that we add 1 to the expected versions and attempts
//All the previous operations should be complete or failed at this point
for (int i = 0; i < numberOfIdsPerThread; ++i) {
UpdateResponse ur = client().prepareUpdate("test", "type1", Integer.toString(i))
client().prepareUpdate("test", "type1", Integer.toString(i))
.setScript(fieldIncScript)
.setRetryOnConflict(Integer.MAX_VALUE)
.setUpsert(jsonBuilder().startObject().field("field", 1).endObject())

@@ -20,8 +20,8 @@ package org.elasticsearch.versioning;

import org.apache.lucene.util.TestUtil;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
@@ -358,7 +358,6 @@ public class SimpleVersioningIT extends ESIntegTestCase {
// zero-pad sequential
logger.info("--> use zero-padded sequential ids");
ids = new IDSource() {
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0);
int upto;

@@ -374,7 +373,6 @@ public class SimpleVersioningIT extends ESIntegTestCase {
logger.info("--> use random long ids");
ids = new IDSource() {
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
int upto;

@Override
public String next() {
@@ -387,8 +385,6 @@ public class SimpleVersioningIT extends ESIntegTestCase {
logger.info("--> use zero-padded random long ids");
ids = new IDSource() {
final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX);
final String zeroPad = String.format(Locale.ROOT, "%015d", 0);
int upto;

@Override
public String next() {

@@ -21,9 +21,6 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.similarities.BM25Similarity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
@@ -37,8 +34,6 @@ import java.util.List;
/** Base test case for subclasses of MappedFieldType */
public abstract class FieldTypeTestCase extends ESTestCase {

private static final Settings INDEX_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();

/** Abstraction for mutating a property of a MappedFieldType */
public abstract static class Modifier {
/** The name of the property that is being modified. Used in test failure messages. */

@@ -103,7 +103,7 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
int length = randomIntBetween(10, 100);
String name = "bar-0-";
generatedBlobs.put(name, (long) length);
byte[] data = writeRandomBlob(container, name, length);
writeRandomBlob(container, name, length);

Map<String, BlobMetaData> blobs = container.listBlobs();
assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs));

@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.SysGlobals;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticsearchException;
@@ -165,10 +166,6 @@ public final class InternalTestCluster extends TestCluster {

private final Logger logger = Loggers.getLogger(getClass());


private static final AtomicInteger clusterOrdinal = new AtomicInteger();


public static final int DEFAULT_LOW_NUM_MASTER_NODES = 1;
public static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3;

@@ -317,7 +314,6 @@ public final class InternalTestCluster extends TestCluster {

this.mockPlugins = mockPlugins;


sharedNodesSeeds = new long[numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes];
for (int i = 0; i < sharedNodesSeeds.length; i++) {
sharedNodesSeeds[i] = random.nextLong();
@@ -2062,6 +2058,7 @@ public final class InternalTestCluster extends TestCluster {
return null;
}

@Override
public synchronized Iterable<Client> getClients() {
ensureOpen();
return () -> {

@@ -21,6 +21,7 @@ package org.elasticsearch.test.store;

import com.carrotsearch.randomizedtesting.SeedUtils;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.BaseDirectoryWrapper;
@@ -62,10 +63,6 @@ public class MockFSDirectoryService extends FsDirectoryService {
Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
public static final Setting<Double> RANDOM_IO_EXCEPTION_RATE_SETTING =
Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> RANDOM_PREVENT_DOUBLE_WRITE_SETTING =
Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> RANDOM_NO_DELETE_OPEN_FILE_SETTING =
Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope);
public static final Setting<Boolean> CRASH_INDEX_SETTING =
Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope);

@@ -74,8 +71,6 @@ public class MockFSDirectoryService extends FsDirectoryService {
private final double randomIOExceptionRate;
private final double randomIOExceptionRateOnOpen;
private final MockDirectoryWrapper.Throttling throttle;
private final boolean preventDoubleWrite;
private final boolean noDeleteOpenFile;
private final boolean crashIndex;

@Inject
@@ -87,9 +82,6 @@ public class MockFSDirectoryService extends FsDirectoryService {

randomIOExceptionRate = RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings);
randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings);
preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings);
noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ?
RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean();
random.nextInt(shardId.getId() + 1); // some randomness per shard
throttle = MockDirectoryWrapper.Throttling.NEVER;
crashIndex = CRASH_INDEX_SETTING.get(indexSettings);

@@ -61,8 +61,6 @@ public class MockFSIndexStore extends IndexStore {
return Arrays.asList(INDEX_CHECK_INDEX_ON_CLOSE_SETTING,
MockFSDirectoryService.CRASH_INDEX_SETTING,
MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING,
MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING,
MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING,
MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING);
}

@@ -86,6 +84,7 @@ public class MockFSIndexStore extends IndexStore {
super(indexSettings);
}

@Override
public DirectoryService newDirectoryService(ShardPath path) {
return new MockFSDirectoryService(indexSettings, this, path);
}

@@ -43,6 +43,7 @@ public class LicenseVerifier {
try {
byte[] signatureBytes = Base64.getDecoder().decode(license.signature());
ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes);
@SuppressWarnings("unused")
int version = byteBuffer.getInt();
int magicLen = byteBuffer.getInt();
byte[] magic = new byte[magicLen];

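Same pattern as the transport test: the version read must happen to move the buffer's position to the magicLen field, so the statement stays and the warning is suppressed. An alternative that avoids the throwaway variable entirely would be to advance the position explicitly; this is a hypothetical rewrite for comparison, not what the commit does:

    import java.nio.ByteBuffer;

    public class BufferAdvanceSketch {
        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(8).putInt(2).putInt(13); // version=2, magicLen=13
            buf.flip();

            // Skip the 4-byte version field without binding an unused local...
            buf.position(buf.position() + Integer.BYTES);

            // ...then read the field the code actually uses.
            int magicLen = buf.getInt();
            System.out.println(magicLen); // 13
        }
    }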
@@ -254,7 +254,6 @@ public class Cron implements ToXContentFragment {
private transient boolean lastdayOfMonth = false;
private transient boolean nearestWeekday = false;
private transient int lastdayOffset = 0;
private transient boolean expressionParsed = false;

public static final int MAX_YEAR = Calendar.getInstance(UTC, Locale.ROOT).get(Calendar.YEAR) + 100;

@@ -802,7 +801,6 @@ public class Cron implements ToXContentFragment {
////////////////////////////////////////////////////////////////////////////

private void buildExpression(String expression) {
expressionParsed = true;

try {

@@ -1214,32 +1212,6 @@ public class Cron implements ToXContentFragment {
return buf.toString();
}

private static String expressionSetSummary(java.util.ArrayList<Integer> list) {

if (list.contains(NO_SPEC)) {
return "?";
}
if (list.contains(ALL_SPEC)) {
return "*";
}

StringBuilder buf = new StringBuilder();

Iterator<Integer> itr = list.iterator();
boolean first = true;
while (itr.hasNext()) {
Integer iVal = itr.next();
String val = iVal.toString();
if (!first) {
buf.append(",");
}
buf.append(val);
first = false;
}

return buf.toString();
}

private static int skipWhiteSpace(int i, String s) {
for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) {
;

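Unlike the suppression cases above, expressionSetSummary had no callers at all, so the entire method is deleted rather than annotated; dead private members only add maintenance cost. A tiny illustration of the distinction, in a hypothetical class:

    public class DeadCodeSketch {
        static int used(int x) {
            return x + 1;
        }

        // Before the analogous cleanup this class also carried:
        //     private static String expressionSetSummary(ArrayList<Integer> list) { ... }
        // No call site referenced it, so the method was removed outright; unlike a
        // side-effecting read, dead private code has nothing worth preserving.

        public static void main(String[] args) {
            System.out.println(used(41)); // 42
        }
    }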
@@ -23,7 +23,6 @@ import org.elasticsearch.xpack.core.security.support.Automatons;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@@ -44,7 +43,6 @@ public final class FieldPermissions implements Accountable {

private static final long BASE_FIELD_PERM_DEF_BYTES = RamUsageEstimator.shallowSizeOf(new FieldPermissionsDefinition(null, null));
private static final long BASE_FIELD_GROUP_BYTES = RamUsageEstimator.shallowSizeOf(new FieldGrantExcludeGroup(null, null));
private static final long BASE_HASHSET_SIZE = RamUsageEstimator.shallowSizeOfInstance(HashSet.class);
private static final long BASE_HASHSET_ENTRY_SIZE;
static {
HashMap<String, Object> map = new HashMap<>();

@@ -185,7 +185,6 @@ public class WatchSourceBuilder implements ToXContentObject {

static class TransformedAction implements ToXContentObject {

private final String id;
private final Action action;
@Nullable private final TimeValue throttlePeriod;
@Nullable private final Condition condition;
@@ -193,7 +192,6 @@ public class WatchSourceBuilder implements ToXContentObject {

TransformedAction(String id, Action action, @Nullable TimeValue throttlePeriod,
@Nullable Condition condition, @Nullable Transform transform) {
this.id = id;
this.throttlePeriod = throttlePeriod;
this.condition = condition;
this.transform = transform;

@@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction;
import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent;
import org.elasticsearch.xpack.core.ml.calendars.ScheduledEventTests;

@@ -63,7 +62,6 @@ public class PostCalendarEventActionRequestTests extends AbstractStreamableTestC

public void testParseRequest_throwsIfCalendarIdsAreDifferent() throws IOException {
PostCalendarEventsAction.Request sourceRequest = createTestInstance("foo");
PostCalendarEventsAction.Request request = new PostCalendarEventsAction.Request("bar", sourceRequest.getScheduledEvents());

StringBuilder requestString = new StringBuilder();
requestString.append("{\"events\": [");

@@ -27,9 +27,9 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.graph.Graph;
import org.elasticsearch.xpack.core.graph.action.GraphExploreAction;
import org.elasticsearch.xpack.core.graph.action.GraphExploreRequestBuilder;
import org.elasticsearch.xpack.graph.Graph;

import java.util.Collection;
import java.util.Collections;

@@ -327,7 +327,7 @@ public class GraphTests extends ESSingleNodeTestCase {
assertTrue(rte.getMessage().contains(GraphExploreRequest.NO_HOPS_ERROR_MESSAGE));
}

Hop hop = grb.createNextHop(null);
grb.createNextHop(null);

try {
grb.get();

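The GraphTests hunk shows the other recurring shape in this commit: a method invoked only for its side effect, whose return value had been bound to a variable that was never read. The fix drops the binding and keeps the call. A trivial standalone illustration of the same pattern:

import java.util.ArrayList;
import java.util.List;

public class SideEffectOnlyCall {
    public static void main(String[] args) {
        List<String> hops = new ArrayList<>();

        // Before: the return value was captured but never read, which
        // triggers the "unused variable" warning.
        //     boolean added = hops.add("hop-1");

        // After: call the method purely for its side effect.
        hops.add("hop-1");

        System.out.println(hops);
    }
}
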
@@ -11,7 +11,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;

@@ -66,8 +65,6 @@ public class DataCountsReporter extends AbstractComponent {
Property.Dynamic,
Property.NodeScope);

private static final TimeValue PERSIST_INTERVAL = TimeValue.timeValueMillis(10_000L);

private final Job job;
private final JobDataCountsPersister dataCountsPersister;

@@ -12,11 +12,11 @@ import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction;
import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction.Request;
import org.elasticsearch.xpack.core.ml.action.util.PageParams;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.ml.MachineLearning;

import java.io.IOException;

@@ -30,7 +30,6 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler {
private final String DEFAULT_SORT = null;
private final String DEFAULT_START = null;
private final String DEFAULT_END = null;
private final String DEFAULT_DESCRIPTION = null;
private final boolean DEFAULT_DESC_ORDER = true;

public RestGetModelSnapshotsAction(Settings settings, RestController controller) {

@@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term;
import org.junit.Before;

import java.io.BufferedReader;

@@ -33,7 +34,6 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term;
import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createHistogramBucket;
import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createMax;
import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createTerms;

@@ -254,7 +254,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
extractor.setNextResponse(createResponseWithShardFailures());

assertThat(extractor.hasNext(), is(true));
IOException e = expectThrows(IOException.class, extractor::next);
expectThrows(IOException.class, extractor::next);
}

public void testExtractionGivenInitSearchResponseEncounteredUnavailableShards() {

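The extractor-test hunk above (and the rollup SearchActionTests hunk below) apply the same rule to expectThrows: keep the returned exception only when the test asserts on it, otherwise discard it. A standalone sketch with a minimal stand-in for the helper (ESTestCase inherits the real one from LuceneTestCase; this local version just makes the example runnable without a test framework):

public class ExpectThrowsExample {

    interface ThrowingRunnable { void run() throws Throwable; }

    // Minimal stand-in for the expectThrows helper used in the tests above.
    static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable r) {
        try {
            r.run();
        } catch (Throwable t) {
            if (expected.isInstance(t)) {
                return expected.cast(t);
            }
            throw new AssertionError("unexpected exception type: " + t, t);
        }
        throw new AssertionError("expected " + expected.getSimpleName() + " was not thrown");
    }

    public static void main(String[] args) {
        // Keep the binding when the test inspects the exception...
        NumberFormatException e = expectThrows(NumberFormatException.class,
                () -> Integer.parseInt("not-a-number"));
        System.out.println("message checked: " + e.getMessage());

        // ...but drop it when only the exception type matters, which is the
        // shape of the cleanup in this commit.
        expectThrows(NumberFormatException.class, () -> Integer.parseInt("oops"));
        System.out.println("type-only check passed");
    }
}
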
@@ -16,7 +16,6 @@ import static org.hamcrest.Matchers.equalTo;

public class ForecastParamsTests extends ESTestCase {

private static ParseField END = new ParseField("end");
private static ParseField DURATION = new ParseField("duration");

public void testForecastIdsAreUnique() {

@@ -81,6 +81,7 @@ public class SearchActionTests extends ESTestCase {

private NamedWriteableRegistry namedWriteableRegistry;

@Override
@Before
public void setUp() throws Exception {
super.setUp();

@@ -523,8 +524,7 @@ public class SearchActionTests extends ESTestCase {
public void testNoIndicesToSeparate() {
String[] indices = new String[]{};
ImmutableOpenMap<String, IndexMetaData> meta = ImmutableOpenMap.<String, IndexMetaData>builder().build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> TransportRollupSearchAction.separateIndices(indices, meta));
expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.separateIndices(indices, meta));
}

public void testSeparateAll() {

@@ -774,6 +774,7 @@ public class SearchActionTests extends ESTestCase {

MultiSearchResponse msearchResponse
= new MultiSearchResponse(new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse}, 123);

SearchResponse response = TransportRollupSearchAction.processResponses(separateIndices, msearchResponse,
mock(InternalAggregation.ReduceContext.class));

Some files were not shown because too many files have changed in this diff.