Merge branch 'master' into feature/query-refactoring
Conflicts: core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java
commit fa93cc7b89
@@ -6,7 +6,7 @@
     <parent>
         <groupId>org.elasticsearch</groupId>
         <artifactId>parent</artifactId>
-        <version>2.1.0-SNAPSHOT</version>
+        <version>3.0.0-SNAPSHOT</version>
     </parent>

     <groupId>org.elasticsearch</groupId>
@@ -259,8 +259,9 @@ public class Version {
    public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
    public static final int V_2_1_0_ID = 2010099;
    public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
-
-   public static final Version CURRENT = V_2_1_0;
+   public static final int V_3_0_0_ID = 3000099;
+   public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
+   public static final Version CURRENT = V_3_0_0;

    static {
        assert CURRENT.luceneVersion.equals(Lucene.VERSION) : "Version must be upgraded to [" + Lucene.VERSION + "] is still set to [" + CURRENT.luceneVersion + "]";
@@ -272,6 +273,8 @@ public class Version {

    public static Version fromId(int id) {
        switch (id) {
+           case V_3_0_0_ID:
+               return V_3_0_0;
            case V_2_1_0_ID:
                return V_2_1_0;
            case V_2_0_0_ID:
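A note on the id constants above: they appear to follow the usual Elasticsearch encoding of major * 1,000,000 + minor * 10,000 + revision * 100 + build, with build 99 marking an unreleased/snapshot build (2010099 = 2.1.0-SNAPSHOT, 3000099 = 3.0.0-SNAPSHOT). The decoder below is a minimal illustrative sketch under that assumption; it is not part of the diff.

```java
// Hypothetical helper: decodes version ids assuming the scheme described above.
public class VersionIdDemo {
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + (build == 99 ? "-SNAPSHOT" : " build " + build);
    }

    public static void main(String[] args) {
        System.out.println(describe(2010099)); // 2.1.0-SNAPSHOT
        System.out.println(describe(3000099)); // 3.0.0-SNAPSHOT
    }
}
```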
@@ -154,14 +154,14 @@ public class AliasAction implements Streamable {
        }
    }

-   public AliasAction filter(QueryBuilder filterBuilder) {
-       if (filterBuilder == null) {
+   public AliasAction filter(QueryBuilder queryBuilder) {
+       if (queryBuilder == null) {
            this.filter = null;
            return this;
        }
        try {
            XContentBuilder builder = XContentFactory.jsonBuilder();
-           filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
+           queryBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.close();
            this.filter = builder.string();
            return this;
@@ -19,7 +19,6 @@

package org.elasticsearch.common.util.concurrent;

-import com.google.common.annotations.Beta;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.transport.Transports;

@@ -195,7 +194,6 @@ public abstract class BaseFuture<V> implements Future<V> {
        return result;
    }

-   @Beta
    protected void done() {
    }

@@ -43,12 +43,19 @@ public abstract class IndexingOperationListener {
    }

    /**
-    * Called after the indexing operation occurred.
+    * Called after create index operation occurred.
     */
    public void postCreate(Engine.Create create) {

    }

    /**
+    * Called after create index operation occurred with exception.
+    */
+   public void postCreate(Engine.Create create, Throwable ex) {
+
+   }
+
+   /**
     * Called before the indexing occurs.
     */
@@ -73,6 +80,13 @@ public abstract class IndexingOperationListener {

    }

    /**
+    * Called after the indexing operation occurred with exception.
+    */
+   public void postIndex(Engine.Index index, Throwable ex) {
+
+   }
+
+   /**
     * Called before the delete occurs.
     */
@@ -96,4 +110,11 @@ public abstract class IndexingOperationListener {
    public void postDelete(Engine.Delete delete) {

    }
+
+   /**
+    * Called after the delete operation occurred with exception.
+    */
+   public void postDelete(Engine.Delete delete, Throwable ex) {
+
+   }
}
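The hunks above add failure variants of the create/index/delete hooks that receive the Throwable that broke the operation. Below is a minimal sketch of a listener built only on those new hooks; the package locations (org.elasticsearch.index.indexing / org.elasticsearch.index.engine) and the registration call mentioned afterwards are assumptions based on the 2.x code layout, not part of the diff.

```java
import java.util.concurrent.atomic.AtomicLong;

import org.elasticsearch.index.engine.Engine;               // assumed location of Engine.Create/Index/Delete
import org.elasticsearch.index.indexing.IndexingOperationListener; // assumed location of the listener base class

// Counts operations that failed inside the engine, using only the hooks added above.
public class FailureCountingListener extends IndexingOperationListener {

    private final AtomicLong failedOps = new AtomicLong();

    @Override
    public void postCreate(Engine.Create create, Throwable ex) {
        failedOps.incrementAndGet();
    }

    @Override
    public void postIndex(Engine.Index index, Throwable ex) {
        failedOps.incrementAndGet();
    }

    @Override
    public void postDelete(Engine.Delete delete, Throwable ex) {
        failedOps.incrementAndGet();
    }

    public long failedOps() {
        return failedOps.get();
    }
}
```

Such a listener would presumably be registered with the shard's indexing service (for example via an addListener-style call, as the `listeners` loop in ShardIndexingService below suggests); the exact registration API is an assumption.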
@@ -99,7 +99,7 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postCreateUnderLock(create);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postCreateUnderLock listener [{}] failed", e, listener);
            }
        }
    }
@@ -124,12 +124,19 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postCreate(create);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postCreate listener [{}] failed", e, listener);
            }
        }
    }

+   public void postCreate(Engine.Create create, Throwable ex) {
+       for (IndexingOperationListener listener : listeners) {
+           try {
+               listener.postCreate(create, ex);
+           } catch (Throwable t) {
+               logger.warn("postCreate listener [{}] failed", t, listener);
+           }
+       }
+   }
+
    public Engine.Index preIndex(Engine.Index index) {
@@ -146,7 +153,7 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postIndexUnderLock(index);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postIndexUnderLock listener [{}] failed", e, listener);
            }
        }
    }
@@ -163,7 +170,7 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postIndex(index);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postIndex listener [{}] failed", e, listener);
            }
        }
    }
@@ -171,6 +178,13 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
    public void postIndex(Engine.Index index, Throwable ex) {
        totalStats.indexCurrent.dec();
        typeStats(index.type()).indexCurrent.dec();
+       for (IndexingOperationListener listener : listeners) {
+           try {
+               listener.postIndex(index, ex);
+           } catch (Throwable t) {
+               logger.warn("postIndex listener [{}] failed", t, listener);
+           }
+       }
    }

    public Engine.Delete preDelete(Engine.Delete delete) {
@@ -187,7 +201,7 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postDeleteUnderLock(delete);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postDeleteUnderLock listener [{}] failed", e, listener);
            }
        }
    }
@@ -203,7 +217,7 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
            try {
                listener.postDelete(delete);
            } catch (Exception e) {
-               logger.warn("post listener [{}] failed", e, listener);
+               logger.warn("postDelete listener [{}] failed", e, listener);
            }
        }
    }
@@ -211,6 +225,13 @@ public class ShardIndexingService extends AbstractIndexShardComponent {
    public void postDelete(Engine.Delete delete, Throwable ex) {
        totalStats.deleteCurrent.dec();
        typeStats(delete.type()).deleteCurrent.dec();
+       for (IndexingOperationListener listener : listeners) {
+           try {
+               listener.postDelete(delete, ex);
+           } catch (Throwable t) {
+               logger.warn("postDelete listener [{}] failed", t, listener);
+           }
+       }
    }

    public void noopUpdate(String type) {
@@ -80,15 +80,16 @@ public class PluginManager {
            "analysis-phonetic",
            "analysis-smartcn",
            "analysis-stempel",
            "cloud-aws",
            "cloud-azure",
            "cloud-gce",
            "delete-by-query",
            "discovery-ec2",
            "discovery-multicast",
            "lang-javascript",
            "lang-python",
            "mapper-murmur3",
-           "mapper-size"
+           "mapper-size",
+           "repository-s3"
            ).build();

    private final Environment environment;
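The list above is the allow-list of official plugin names, built with an immutable-set builder (the trailing ").build()" suggests Guava's ImmutableSet, which is an assumption). A minimal sketch of that pattern, with only a few of the names shown:

```java
import com.google.common.collect.ImmutableSet;

// Illustrative only: a fixed allow-list of official plugin names, as PluginManager appears to build.
public class OfficialPlugins {
    static final ImmutableSet<String> OFFICIAL_PLUGINS = ImmutableSet.<String>builder()
            .add("analysis-phonetic")
            .add("delete-by-query")
            .add("mapper-size")
            .add("repository-s3") // newly added in this merge
            .build();

    public static void main(String[] args) {
        System.out.println(OFFICIAL_PLUGINS.contains("repository-s3")); // true
    }
}
```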
@@ -20,6 +20,9 @@
package org.elasticsearch.search.query;

import com.google.common.collect.ImmutableMap;

+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.*;
import org.elasticsearch.action.search.SearchType;
@@ -100,23 +103,39 @@ public class QueryPhase implements SearchPhase {
        // here to make sure it happens during the QUERY phase
        aggregationPhase.preProcess(searchContext);

-       searchContext.queryResult().searchTimedOut(false);
+       boolean rescore = execute(searchContext, searchContext.searcher());
+
+       if (rescore) { // only if we do a regular search
+           rescorePhase.execute(searchContext);
+       }
+       suggestPhase.execute(searchContext);
+       aggregationPhase.execute(searchContext);
+   }
+
+   /**
+    * In a package-private method so that it can be tested without having to
+    * wire everything (mapperService, etc.)
+    * @return whether the rescoring phase should be executed
+    */
+   static boolean execute(SearchContext searchContext, final IndexSearcher searcher) throws QueryPhaseExecutionException {
+       QuerySearchResult queryResult = searchContext.queryResult();
+       queryResult.searchTimedOut(false);

        final SearchType searchType = searchContext.searchType();
        boolean rescore = false;
        try {
-           searchContext.queryResult().from(searchContext.from());
-           searchContext.queryResult().size(searchContext.size());
+           queryResult.from(searchContext.from());
+           queryResult.size(searchContext.size());

-           final IndexSearcher searcher = searchContext.searcher();
            Query query = searchContext.query();

            final int totalNumDocs = searcher.getIndexReader().numDocs();
            int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs);

            Collector collector;
-           final Callable<TopDocs> topDocsCallable;
+           Callable<TopDocs> topDocsCallable;

            assert query == searcher.rewrite(query); // already rewritten
            if (searchContext.size() == 0) { // no matter what the value of from is
                final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
                collector = totalHitCountCollector;
@@ -240,36 +259,75 @@ public class QueryPhase implements SearchPhase {
                collector = new MinimumScoreCollector(collector, searchContext.minimumScore());
            }

+           if (collector.getClass() == TotalHitCountCollector.class) {
+               // Optimize counts in simple cases to return in constant time
+               // instead of using a collector
+               while (true) {
+                   // remove wrappers that don't matter for counts
+                   // this is necessary so that we don't only optimize match_all
+                   // queries but also match_all queries that are nested in
+                   // a constant_score query
+                   if (query instanceof ConstantScoreQuery) {
+                       query = ((ConstantScoreQuery) query).getQuery();
+                   } else {
+                       break;
+                   }
+               }
+
+               if (query.getClass() == MatchAllDocsQuery.class) {
+                   collector = null;
+                   topDocsCallable = new Callable<TopDocs>() {
+                       @Override
+                       public TopDocs call() throws Exception {
+                           int count = searcher.getIndexReader().numDocs();
+                           return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0);
+                       }
+                   };
+               } else if (query.getClass() == TermQuery.class && searcher.getIndexReader().hasDeletions() == false) {
+                   final Term term = ((TermQuery) query).getTerm();
+                   collector = null;
+                   topDocsCallable = new Callable<TopDocs>() {
+                       @Override
+                       public TopDocs call() throws Exception {
+                           int count = 0;
+                           for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
+                               count += context.reader().docFreq(term);
+                           }
+                           return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0);
+                       }
+                   };
+               }
+           }
+
            final boolean timeoutSet = searchContext.timeoutInMillis() != SearchService.NO_TIMEOUT.millis();
-           if (timeoutSet) {
+           if (timeoutSet && collector != null) { // collector might be null if no collection is actually needed
                // TODO: change to use our own counter that uses the scheduler in ThreadPool
                // throws TimeLimitingCollector.TimeExceededException when timeout has reached
                collector = Lucene.wrapTimeLimitingCollector(collector, searchContext.timeEstimateCounter(), searchContext.timeoutInMillis());
            }

            try {
-               searchContext.searcher().search(query, collector);
+               if (collector != null) {
+                   searcher.search(query, collector);
+               }
            } catch (TimeLimitingCollector.TimeExceededException e) {
                assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set";
-               searchContext.queryResult().searchTimedOut(true);
+               queryResult.searchTimedOut(true);
            } catch (Lucene.EarlyTerminationException e) {
                assert terminateAfterSet : "EarlyTerminationException thrown even though terminateAfter wasn't set";
-               searchContext.queryResult().terminatedEarly(true);
+               queryResult.terminatedEarly(true);
            } finally {
                searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION);
            }
-           if (terminateAfterSet && searchContext.queryResult().terminatedEarly() == null) {
-               searchContext.queryResult().terminatedEarly(false);
+           if (terminateAfterSet && queryResult.terminatedEarly() == null) {
+               queryResult.terminatedEarly(false);
            }

-           searchContext.queryResult().topDocs(topDocsCallable.call());
+           queryResult.topDocs(topDocsCallable.call());
+
+           return rescore;
        } catch (Throwable e) {
            throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e);
        }
-       if (rescore) { // only if we do a regular search
-           rescorePhase.execute(searchContext);
-       }
-       suggestPhase.execute(searchContext);
-       aggregationPhase.execute(searchContext);
-   }
    }
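The hunk above short-circuits pure counts (size == 0): a match_all count becomes reader.numDocs(), and a single-term count on an index without deletions becomes the sum of the term's docFreq over the leaves, so no collector has to run. The standalone sketch below demonstrates why that equivalence holds; it targets the Lucene 5.x-era APIs used here (class names such as RAMDirectory are assumed available in that version) and is not part of the diff.

```java
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CountShortcutDemo {
    public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer()))) {
            for (int i = 0; i < 10; i++) {
                Document doc = new Document();
                doc.add(new StringField("color", i % 2 == 0 ? "red" : "blue", Field.Store.NO));
                writer.addDocument(doc);
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Term term = new Term("color", "red");

            // The general path: run the query through a hit-counting collector.
            TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(term), collector);

            // The shortcut used above: with no deletions, the term's docFreq is the hit count.
            int count = 0;
            for (LeafReaderContext context : reader.leaves()) {
                count += context.reader().docFreq(term);
            }
            System.out.println(collector.getTotalHits() + " == " + count); // 5 == 5
        }
    }
}
```

The deletions check matters because docFreq still counts deleted documents, which is exactly why the optimized branch requires hasDeletions() == false.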
@@ -38,15 +38,16 @@ OFFICIAL PLUGINS
    - analysis-phonetic
    - analysis-smartcn
    - analysis-stempel
    - cloud-aws
    - cloud-azure
    - cloud-gce
    - delete-by-query
    - discovery-ec2
    - discovery-multicast
    - lang-javascript
    - lang-python
    - mapper-murmur3
    - mapper-size
+   - repository-s3


OPTIONS
@@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo;

/**
 */
-public class BlendedTermQueryTest extends ESTestCase {
+public class BlendedTermQueryTests extends ESTestCase {

    @Test
    public void testBooleanQuery() throws IOException {
@@ -28,6 +28,7 @@ import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.client.AbstractClientHeadersTestCase;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -44,6 +45,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
+import org.elasticsearch.common.util.CancellableThreadsTests;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.index.AlreadyExpiredException;
import org.elasticsearch.index.Index;
@@ -104,9 +106,9 @@ public class ExceptionSerializationTests extends ESTestCase {
                org.elasticsearch.test.rest.parser.RestTestParseException.class,
                org.elasticsearch.index.query.TestQueryParsingException.class,
                org.elasticsearch.test.rest.client.RestException.class,
-               org.elasticsearch.common.util.CancellableThreadsTest.CustomException.class,
+               CancellableThreadsTests.CustomException.class,
                org.elasticsearch.rest.BytesRestResponseTests.WithHeadersException.class,
-               org.elasticsearch.client.AbstractClientHeadersTests.InternalException.class);
+               AbstractClientHeadersTestCase.InternalException.class);
        FileVisitor<Path> visitor = new FileVisitor<Path>() {
            private Path pkgPrefix = PathUtils.get(path).getParent();

@@ -18,25 +18,25 @@
 */
package org.elasticsearch;

+import com.google.common.base.Joiner;
import com.google.common.collect.Sets;

import junit.framework.TestCase;

-import com.google.common.base.Joiner;
-
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.junit.Ignore;
import org.junit.Test;

import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URISyntaxException;
-import java.nio.file.*;
+import java.nio.file.FileVisitResult;
+import java.nio.file.FileVisitor;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashSet;
import java.util.Set;
@@ -53,20 +53,17 @@ public class NamingConventionTests extends ESTestCase {
        final Set<Class> pureUnitTest = new HashSet<>();
        final Set<Class> missingSuffix = new HashSet<>();
        final Set<Class> integTestsInDisguise = new HashSet<>();
+       final Set<Class> notRunnable = new HashSet<>();
+       final Set<Class> innerClasses = new HashSet<>();
        String[] packages = {"org.elasticsearch", "org.apache.lucene"};
        for (final String packageName : packages) {
            final String path = "/" + packageName.replace('.', '/');
            final Path startPath = getDataPath(path);
-           final Set<Path> ignore = Sets.newHashSet(PathUtils.get("/org/elasticsearch/stresstest"), PathUtils.get("/org/elasticsearch/benchmark/stress"));
            Files.walkFileTree(startPath, new FileVisitor<Path>() {
                private Path pkgPrefix = PathUtils.get(path).getParent();
                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-                   Path next = pkgPrefix.resolve(dir.getFileName());
-                   if (ignore.contains(next)) {
-                       return FileVisitResult.SKIP_SUBTREE;
-                   }
-                   pkgPrefix = next;
+                   pkgPrefix = pkgPrefix.resolve(dir.getFileName());
                    return FileVisitResult.CONTINUE;
                }

@@ -76,28 +73,30 @@ public class NamingConventionTests extends ESTestCase {
                        String filename = file.getFileName().toString();
                        if (filename.endsWith(".class")) {
                            Class<?> clazz = loadClass(filename);
-                           if (Modifier.isAbstract(clazz.getModifiers()) == false && Modifier.isInterface(clazz.getModifiers()) == false) {
-                               if (clazz.getName().endsWith("Tests") ||
-                                   clazz.getName().endsWith("Test")) { // don't worry about the ones that match the pattern
+                           if (clazz.getName().endsWith("Tests")) { // don't worry about the ones that match the pattern

-                                   if (ESIntegTestCase.class.isAssignableFrom(clazz)) {
-                                       integTestsInDisguise.add(clazz);
-                                   }
-                                   if (isTestCase(clazz) == false) {
-                                       notImplementing.add(clazz);
-                                   }
-                               } else if (clazz.getName().endsWith("IT")) {
-                                   if (isTestCase(clazz) == false) {
-                                       notImplementing.add(clazz);
-                                   }
-                                   // otherwise fine
-                               } else if (isTestCase(clazz)) {
+                               if (ESIntegTestCase.class.isAssignableFrom(clazz)) {
+                                   integTestsInDisguise.add(clazz);
+                               }
+                               if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+                                   notRunnable.add(clazz);
+                               } else if (isTestCase(clazz) == false) {
+                                   notImplementing.add(clazz);
+                               } else if (Modifier.isStatic(clazz.getModifiers())) {
+                                   innerClasses.add(clazz);
+                               }
+                           } else if (clazz.getName().endsWith("IT")) {
+                               if (isTestCase(clazz) == false) {
+                                   notImplementing.add(clazz);
+                               }
+                               // otherwise fine
+                           } else if (Modifier.isAbstract(clazz.getModifiers()) == false && Modifier.isInterface(clazz.getModifiers()) == false) {
                                if (isTestCase(clazz)) {
                                    missingSuffix.add(clazz);
                                } else if (junit.framework.Test.class.isAssignableFrom(clazz) || hasTestAnnotation(clazz)) {
                                    pureUnitTest.add(clazz);
                                }
                            }

                        }
                    } catch (ClassNotFoundException e) {
                        throw new RuntimeException(e);
@@ -143,39 +142,47 @@ public class NamingConventionTests extends ESTestCase {
        }
        assertTrue(missingSuffix.remove(WrongName.class));
        assertTrue(missingSuffix.remove(WrongNameTheSecond.class));
+       assertTrue(notRunnable.remove(DummyAbstractTests.class));
+       assertTrue(notRunnable.remove(DummyInterfaceTests.class));
+       assertTrue(innerClasses.remove(InnerTests.class));
        assertTrue(notImplementing.remove(NotImplementingTests.class));
        assertTrue(notImplementing.remove(NotImplementingTest.class));
        assertTrue(pureUnitTest.remove(PlainUnit.class));
        assertTrue(pureUnitTest.remove(PlainUnitTheSecond.class));

        String classesToSubclass = Joiner.on(',').join(
-           ESTestCase.class.getSimpleName(),
-           ESTestCase.class.getSimpleName(),
-           ESTokenStreamTestCase.class.getSimpleName(),
-           LuceneTestCase.class.getSimpleName());
+               ESTestCase.class.getSimpleName(),
+               ESTestCase.class.getSimpleName(),
+               ESTokenStreamTestCase.class.getSimpleName(),
+               LuceneTestCase.class.getSimpleName());
        assertTrue("Not all subclasses of " + ESTestCase.class.getSimpleName() +
-           " match the naming convention. Concrete classes must end with [Test|Tests]: " + missingSuffix.toString(),
-           missingSuffix.isEmpty());
-       assertTrue("Pure Unit-Test found must subclass one of [" + classesToSubclass +"] " + pureUnitTest.toString(),
-           pureUnitTest.isEmpty());
-       assertTrue("Classes ending with Test|Tests] must subclass [" + classesToSubclass +"] " + notImplementing.toString(),
-           notImplementing.isEmpty());
-       assertTrue("Subclasses of ESIntegTestCase should end with IT as they are integration tests: " + integTestsInDisguise, integTestsInDisguise.isEmpty());
+               " match the naming convention. Concrete classes must end with [Tests]:\n" + Joiner.on('\n').join(missingSuffix),
+               missingSuffix.isEmpty());
+       assertTrue("Classes ending with [Tests] are abstract or interfaces:\n" + Joiner.on('\n').join(notRunnable),
+               notRunnable.isEmpty());
+       assertTrue("Found inner classes that are tests, which are excluded from the test runner:\n" + Joiner.on('\n').join(innerClasses),
+               innerClasses.isEmpty());
+       assertTrue("Pure Unit-Test found must subclass one of [" + classesToSubclass +"]:\n" + Joiner.on('\n').join(pureUnitTest),
+               pureUnitTest.isEmpty());
+       assertTrue("Classes ending with [Tests] must subclass [" + classesToSubclass + "]:\n" + Joiner.on('\n').join(notImplementing),
+               notImplementing.isEmpty());
+       assertTrue("Subclasses of ESIntegTestCase should end with IT as they are integration tests:\n" + Joiner.on('\n').join(integTestsInDisguise),
+               integTestsInDisguise.isEmpty());
    }

    /*
     * Some test the test classes
     */

    @SuppressForbidden(reason = "Ignoring test the tester")
    @Ignore
    public static final class NotImplementingTests {}
    @SuppressForbidden(reason = "Ignoring test the tester")
    @Ignore
    public static final class NotImplementingTest {}

    public static final class WrongName extends ESTestCase {}

+   public static abstract class DummyAbstractTests extends ESTestCase {}
+
+   public interface DummyInterfaceTests {}
+
+   public static final class InnerTests extends ESTestCase {}
+
    public static final class WrongNameTheSecond extends ESTestCase {}

    public static final class PlainUnit extends TestCase {}
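The reworked convention test enforces three rules: concrete unit tests must end in "Tests", integration tests in "IT", and classes named "Tests" that are abstract or interfaces are flagged as not runnable. The sketch below reproduces that classification rule in a self-contained form; the sample classes are hypothetical, ESTestCase is stubbed as a plain base class, and the separate inner-class check from the real test is omitted for brevity.

```java
import java.lang.reflect.Modifier;

// Standalone illustration of the naming/structure rules the test above asserts.
public class NamingConventionSketch {
    static class ESTestCase {}
    static class FooTests extends ESTestCase {}            // fine: concrete, ends in "Tests"
    static abstract class BarTests extends ESTestCase {}   // flagged: not runnable
    static class BazTest extends ESTestCase {}             // flagged: missing "Tests" suffix

    static String classify(Class<?> clazz) {
        boolean isTestCase = ESTestCase.class.isAssignableFrom(clazz) && clazz != ESTestCase.class;
        if (clazz.getName().endsWith("Tests")) {
            if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
                return "not runnable";
            }
            return isTestCase ? "ok" : "does not implement a test base class";
        }
        return isTestCase ? "missing [Tests] suffix" : "not a test";
    }

    public static void main(String[] args) {
        System.out.println(classify(FooTests.class)); // ok
        System.out.println(classify(BarTests.class)); // not runnable
        System.out.println(classify(BazTest.class));  // missing [Tests] suffix
    }
}
```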
@@ -32,7 +32,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
/**
 * Unit tests for the {@link ClusterStateRequest}.
 */
-public class ClusterStateRequestTest extends ESTestCase {
+public class ClusterStateRequestTests extends ESTestCase {

    @Test
    public void testSerialization() throws Exception {
@@ -33,7 +33,7 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

-public class CreateIndexRequestBuilderTest extends ESTestCase {
+public class CreateIndexRequestBuilderTests extends ESTestCase {

    private static final String KEY = "my.settings.key";
    private static final String VALUE = "my.settings.value";
@@ -36,7 +36,7 @@ import java.util.*;

import static org.hamcrest.Matchers.equalTo;

-public class IndicesShardStoreResponseTest extends ESTestCase {
+public class IndicesShardStoreResponseTests extends ESTestCase {

    @Test
    public void testBasicSerialization() throws Exception {
@@ -28,7 +28,7 @@ import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX;
import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MIN;
import static org.hamcrest.Matchers.equalTo;

-public class FieldStatsRequestTest extends ESTestCase {
+public class FieldStatsRequestTests extends ESTestCase {

    public void testFieldsParsing() throws Exception {
        byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/fieldstats-index-constraints-request.json");
@@ -33,7 +33,7 @@ import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;

-public class IndexRequestBuilderTest extends ESTestCase {
+public class IndexRequestBuilderTests extends ESTestCase {

    private static final String EXPECTED_SOURCE = "{\"SomeKey\":\"SomeValue\"}";
    private NoOpClient testClient;
@@ -1,282 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.benchmark.stress;
|
||||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.node.Node;
|
||||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.CyclicBarrier;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static org.elasticsearch.client.Requests.searchRequest;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
|
||||
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class NodesStressTest {
|
||||
|
||||
private Node[] nodes;
|
||||
|
||||
private int numberOfNodes = 2;
|
||||
|
||||
private Client[] clients;
|
||||
|
||||
private AtomicLong idGenerator = new AtomicLong();
|
||||
|
||||
private int fieldNumLimit = 50;
|
||||
|
||||
private long searcherIterations = 10;
|
||||
private Searcher[] searcherThreads = new Searcher[1];
|
||||
|
||||
private long indexIterations = 10;
|
||||
private Indexer[] indexThreads = new Indexer[1];
|
||||
|
||||
private TimeValue sleepAfterDone = TimeValue.timeValueMillis(0);
|
||||
private TimeValue sleepBeforeClose = TimeValue.timeValueMillis(0);
|
||||
|
||||
private CountDownLatch latch;
|
||||
private CyclicBarrier barrier1;
|
||||
private CyclicBarrier barrier2;
|
||||
|
||||
public NodesStressTest() {
|
||||
}
|
||||
|
||||
public NodesStressTest numberOfNodes(int numberOfNodes) {
|
||||
this.numberOfNodes = numberOfNodes;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest fieldNumLimit(int fieldNumLimit) {
|
||||
this.fieldNumLimit = fieldNumLimit;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest searchIterations(int searchIterations) {
|
||||
this.searcherIterations = searchIterations;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest searcherThreads(int numberOfSearcherThreads) {
|
||||
searcherThreads = new Searcher[numberOfSearcherThreads];
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest indexIterations(long indexIterations) {
|
||||
this.indexIterations = indexIterations;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest indexThreads(int numberOfWriterThreads) {
|
||||
indexThreads = new Indexer[numberOfWriterThreads];
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest sleepAfterDone(TimeValue time) {
|
||||
this.sleepAfterDone = time;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest sleepBeforeClose(TimeValue time) {
|
||||
this.sleepBeforeClose = time;
|
||||
return this;
|
||||
}
|
||||
|
||||
public NodesStressTest build(Settings settings) throws Exception {
|
||||
settings = settingsBuilder()
|
||||
// .put("index.refresh_interval", 1, TimeUnit.SECONDS)
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 5)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.put(settings)
|
||||
.build();
|
||||
|
||||
nodes = new Node[numberOfNodes];
|
||||
clients = new Client[numberOfNodes];
|
||||
for (int i = 0; i < numberOfNodes; i++) {
|
||||
nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
|
||||
clients[i] = nodes[i].client();
|
||||
}
|
||||
|
||||
for (int i = 0; i < searcherThreads.length; i++) {
|
||||
searcherThreads[i] = new Searcher(i);
|
||||
}
|
||||
for (int i = 0; i < indexThreads.length; i++) {
|
||||
indexThreads[i] = new Indexer(i);
|
||||
}
|
||||
|
||||
latch = new CountDownLatch(1);
|
||||
barrier1 = new CyclicBarrier(2);
|
||||
barrier2 = new CyclicBarrier(2);
|
||||
// warmup
|
||||
StopWatch stopWatch = new StopWatch().start();
|
||||
Indexer warmup = new Indexer(-1).max(10000);
|
||||
warmup.start();
|
||||
barrier1.await();
|
||||
barrier2.await();
|
||||
latch.await();
|
||||
stopWatch.stop();
|
||||
System.out.println("Done Warmup, took [" + stopWatch.totalTime() + "]");
|
||||
|
||||
latch = new CountDownLatch(searcherThreads.length + indexThreads.length);
|
||||
barrier1 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
|
||||
barrier2 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public void start() throws Exception {
|
||||
for (Thread t : searcherThreads) {
|
||||
t.start();
|
||||
}
|
||||
for (Thread t : indexThreads) {
|
||||
t.start();
|
||||
}
|
||||
barrier1.await();
|
||||
|
||||
StopWatch stopWatch = new StopWatch();
|
||||
stopWatch.start();
|
||||
|
||||
barrier2.await();
|
||||
|
||||
latch.await();
|
||||
stopWatch.stop();
|
||||
|
||||
System.out.println("Done, took [" + stopWatch.totalTime() + "]");
|
||||
System.out.println("Sleeping before close: " + sleepBeforeClose);
|
||||
Thread.sleep(sleepBeforeClose.millis());
|
||||
|
||||
for (Client client : clients) {
|
||||
client.close();
|
||||
}
|
||||
for (Node node : nodes) {
|
||||
node.close();
|
||||
}
|
||||
|
||||
System.out.println("Sleeping before exit: " + sleepBeforeClose);
|
||||
Thread.sleep(sleepAfterDone.millis());
|
||||
}
|
||||
|
||||
class Searcher extends Thread {
|
||||
final int id;
|
||||
long counter = 0;
|
||||
long max = searcherIterations;
|
||||
|
||||
Searcher(int id) {
|
||||
super("Searcher" + id);
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
barrier1.await();
|
||||
barrier2.await();
|
||||
for (; counter < max; counter++) {
|
||||
Client client = client(counter);
|
||||
QueryBuilder query = termQuery("num", counter % fieldNumLimit);
|
||||
query = constantScoreQuery(query);
|
||||
|
||||
SearchResponse search = client.search(searchRequest()
|
||||
.source(searchSource().query(query)))
|
||||
.actionGet();
|
||||
// System.out.println("Got search response, hits [" + search.hits().totalHits() + "]");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
System.err.println("Failed to search:");
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class Indexer extends Thread {
|
||||
|
||||
final int id;
|
||||
long counter = 0;
|
||||
long max = indexIterations;
|
||||
|
||||
Indexer(int id) {
|
||||
super("Indexer" + id);
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
Indexer max(int max) {
|
||||
this.max = max;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
barrier1.await();
|
||||
barrier2.await();
|
||||
for (; counter < max; counter++) {
|
||||
Client client = client(counter);
|
||||
long id = idGenerator.incrementAndGet();
|
||||
client.index(Requests.indexRequest().index("test").type("type1").id(Long.toString(id))
|
||||
.source(XContentFactory.jsonBuilder().startObject()
|
||||
.field("num", id % fieldNumLimit)
|
||||
.endObject()))
|
||||
.actionGet();
|
||||
}
|
||||
System.out.println("Indexer [" + id + "]: Done");
|
||||
} catch (Exception e) {
|
||||
System.err.println("Failed to index:");
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Client client(long i) {
|
||||
return clients[((int) (i % clients.length))];
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
NodesStressTest test = new NodesStressTest()
|
||||
.numberOfNodes(2)
|
||||
.indexThreads(5)
|
||||
.indexIterations(10 * 1000)
|
||||
.searcherThreads(5)
|
||||
.searchIterations(10 * 1000)
|
||||
.sleepBeforeClose(TimeValue.timeValueMinutes(10))
|
||||
.sleepAfterDone(TimeValue.timeValueMinutes(10))
|
||||
.build(EMPTY_SETTINGS);
|
||||
|
||||
test.start();
|
||||
}
|
||||
}
|
|
@@ -1,123 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.benchmark.stress;
|
||||
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.SizeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.node.Node;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Random;
|
||||
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SingleThreadBulkStress {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
Random random = new Random();
|
||||
|
||||
int shardsCount = Integer.parseInt(System.getProperty("es.shards", "1"));
|
||||
int replicaCount = Integer.parseInt(System.getProperty("es.replica", "1"));
|
||||
boolean autoGenerateId = true;
|
||||
|
||||
Settings settings = settingsBuilder()
|
||||
.put("index.refresh_interval", "1s")
|
||||
.put("index.merge.async", true)
|
||||
.put("index.translog.flush_threshold_ops", 5000)
|
||||
.put(SETTING_NUMBER_OF_SHARDS, shardsCount)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, replicaCount)
|
||||
.build();
|
||||
|
||||
Node[] nodes = new Node[1];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
|
||||
}
|
||||
|
||||
//Node client = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "client")).client(true).node();
|
||||
Node client = nodes[0];
|
||||
|
||||
Client client1 = client.client();
|
||||
|
||||
Thread.sleep(1000);
|
||||
client1.admin().indices().prepareCreate("test").setSettings(settings).addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
|
||||
.startObject("_source").field("enabled", false).endObject()
|
||||
.startObject("_all").field("enabled", false).endObject()
|
||||
.startObject("_type").field("index", "no").endObject()
|
||||
.startObject("_id").field("index", "no").endObject()
|
||||
.startObject("properties")
|
||||
.startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", true).endObject()
|
||||
// .startObject("field").field("index", "analyzed").field("omit_norms", false).endObject()
|
||||
.endObject()
|
||||
.endObject().endObject()).execute().actionGet();
|
||||
Thread.sleep(5000);
|
||||
|
||||
StopWatch stopWatch = new StopWatch().start();
|
||||
long COUNT = SizeValue.parseSizeValue("2m").singles();
|
||||
int BATCH = 500;
|
||||
System.out.println("Indexing [" + COUNT + "] ...");
|
||||
long ITERS = COUNT / BATCH;
|
||||
long i = 1;
|
||||
int counter = 0;
|
||||
for (; i <= ITERS; i++) {
|
||||
BulkRequestBuilder request = client1.prepareBulk();
|
||||
for (int j = 0; j < BATCH; j++) {
|
||||
counter++;
|
||||
request.add(Requests.indexRequest("test").type("type1").id(autoGenerateId ? null : Integer.toString(counter)).source(source(Integer.toString(counter), "test" + counter)));
|
||||
}
|
||||
BulkResponse response = request.execute().actionGet();
|
||||
if (response.hasFailures()) {
|
||||
System.err.println("failures...");
|
||||
}
|
||||
if (((i * BATCH) % 10000) == 0) {
|
||||
System.out.println("Indexed " + (i * BATCH) + " took " + stopWatch.stop().lastTaskTime());
|
||||
stopWatch.start();
|
||||
}
|
||||
}
|
||||
System.out.println("Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) COUNT) / stopWatch.totalTime().secondsFrac()));
|
||||
|
||||
client.client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
System.out.println("Count: " + client.client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount());
|
||||
|
||||
client.close();
|
||||
|
||||
for (Node node : nodes) {
|
||||
node.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static XContentBuilder source(String id, String nameValue) throws IOException {
|
||||
return jsonBuilder().startObject().field("field", nameValue).endObject();
|
||||
}
|
||||
}
|
|
@@ -1,108 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.benchmark.stress;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.StopWatch;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.node.Node;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.client.Requests.createIndexRequest;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
|
||||
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SingleThreadIndexingStress {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
Settings settings = settingsBuilder()
|
||||
.put("index.refresh_interval", "1s")
|
||||
.put("index.merge.async", true)
|
||||
.put("index.translog.flush_threshold_ops", 5000)
|
||||
.put(SETTING_NUMBER_OF_SHARDS, 2)
|
||||
.put(SETTING_NUMBER_OF_REPLICAS, 1)
|
||||
.build();
|
||||
|
||||
Node[] nodes = new Node[1];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
|
||||
}
|
||||
|
||||
Node client = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "client")).client(true).node();
|
||||
|
||||
Client client1 = client.client();
|
||||
|
||||
Thread.sleep(1000);
|
||||
client1.admin().indices().create(createIndexRequest("test")).actionGet();
|
||||
Thread.sleep(5000);
|
||||
|
||||
StopWatch stopWatch = new StopWatch().start();
|
||||
int COUNT = 200000;
|
||||
int ID_RANGE = 100;
|
||||
System.out.println("Indexing [" + COUNT + "] ...");
|
||||
int i = 1;
|
||||
for (; i <= COUNT; i++) {
|
||||
// client1.admin().cluster().preparePingSingle("test", "type1", Integer.toString(i)).execute().actionGet();
|
||||
client1.prepareIndex("test", "type1").setId(Integer.toString(i % ID_RANGE)).setSource(source(Integer.toString(i), "test" + i))
|
||||
.setCreate(false).execute().actionGet();
|
||||
if ((i % 10000) == 0) {
|
||||
System.out.println("Indexed " + i + " took " + stopWatch.stop().lastTaskTime());
|
||||
stopWatch.start();
|
||||
}
|
||||
}
|
||||
System.out.println("Indexing took " + stopWatch.totalTime() + ", TPS " + (((double) COUNT) / stopWatch.totalTime().secondsFrac()));
|
||||
|
||||
client.client().admin().indices().prepareRefresh().execute().actionGet();
|
||||
System.out.println("Count: " + client.client().prepareCount().setQuery(matchAllQuery()).execute().actionGet().getCount());
|
||||
|
||||
client.close();
|
||||
|
||||
for (Node node : nodes) {
|
||||
node.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static XContentBuilder source(String id, String nameValue) throws IOException {
|
||||
long time = System.currentTimeMillis();
|
||||
return jsonBuilder().startObject()
|
||||
.field("id", id)
|
||||
// .field("numeric1", time)
|
||||
// .field("numeric2", time)
|
||||
// .field("numeric3", time)
|
||||
// .field("numeric4", time)
|
||||
// .field("numeric5", time)
|
||||
// .field("numeric6", time)
|
||||
// .field("numeric7", time)
|
||||
// .field("numeric8", time)
|
||||
// .field("numeric9", time)
|
||||
// .field("numeric10", time)
|
||||
.field("name", nameValue)
|
||||
.endObject();
|
||||
}
|
||||
}
|
|
@@ -64,7 +64,7 @@ import static org.hamcrest.Matchers.*;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public abstract class AbstractClientHeadersTests extends ESTestCase {
|
||||
public abstract class AbstractClientHeadersTestCase extends ESTestCase {
|
||||
|
||||
protected static final Settings HEADER_SETTINGS = Settings.builder()
|
||||
.put(Headers.PREFIX + ".key1", "val1")
|
|
@@ -25,7 +25,7 @@ import org.elasticsearch.action.GenericAction;
|
|||
import org.elasticsearch.action.support.ActionFilter;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.TransportAction;
|
||||
import org.elasticsearch.client.AbstractClientHeadersTests;
|
||||
import org.elasticsearch.client.AbstractClientHeadersTestCase;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.support.Headers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
@@ -37,7 +37,7 @@ import java.util.HashMap;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public class NodeClientHeadersTests extends AbstractClientHeadersTests {
|
||||
public class NodeClientHeadersTests extends AbstractClientHeadersTestCase {
|
||||
|
||||
private static final ActionFilters EMPTY_FILTERS = new ActionFilters(Collections.<ActionFilter>emptySet());
|
||||
|
||||
|
|
|
@@ -25,7 +25,7 @@ import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse;
|
|||
import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction;
|
||||
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
|
||||
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
|
||||
import org.elasticsearch.client.AbstractClientHeadersTests;
|
||||
import org.elasticsearch.client.AbstractClientHeadersTestCase;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
|
@@ -48,7 +48,6 @@ import org.elasticsearch.transport.TransportResponseHandler;
|
|||
import org.elasticsearch.transport.TransportService;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
|
@@ -58,7 +57,7 @@ import static org.hamcrest.Matchers.is;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public class TransportClientHeadersTests extends AbstractClientHeadersTests {
|
||||
public class TransportClientHeadersTests extends AbstractClientHeadersTestCase {
|
||||
|
||||
private static final LocalTransportAddress address = new LocalTransportAddress("test");
|
||||
|
||||
|
|
|
@@ -27,7 +27,7 @@ import org.elasticsearch.test.ESTestCase;
|
|||
import java.util.Arrays;
|
||||
import java.util.Locale;
|
||||
|
||||
public class MetaDataIndexUpgradeServiceTest extends ESTestCase {
|
||||
public class MetaDataIndexUpgradeServiceTests extends ESTestCase {
|
||||
|
||||
public void testUpgradeStoreSettings() {
|
||||
final String type = RandomPicks.randomFrom(random(), Arrays.asList("nio_fs", "mmap_fs", "simple_fs", "default", "fs"));
|
|
@@ -38,7 +38,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
|||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class RoutingTableTest extends ESAllocationTestCase {
|
||||
public class RoutingTableTests extends ESAllocationTestCase {
|
||||
|
||||
private static final String TEST_INDEX_1 = "test1";
|
||||
private static final String TEST_INDEX_2 = "test2";
|
|
@@ -40,7 +40,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
|||
/**
|
||||
* see issue #9023
|
||||
*/
|
||||
public class BalanceUnbalancedClusterTest extends CatAllocationTestCase {
|
||||
public class BalanceUnbalancedClusterTests extends CatAllocationTestCase {
|
||||
|
||||
@Override
|
||||
protected Path getCatPath() throws IOException {
|
|
@@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.is;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public class Base64Test extends ESTestCase {
|
||||
public class Base64Tests extends ESTestCase {
|
||||
|
||||
@Test // issue #6334
|
||||
public void testBase64DecodeWithExtraCharactersAfterPadding() throws Exception {
|
|
@@ -40,7 +40,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
|
|||
import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
|
||||
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
|
||||
public class BlobStoreTest extends ESTestCase {
|
||||
public class BlobStoreTests extends ESTestCase {
|
||||
|
||||
@Test
|
||||
public void testWriteRead() throws IOException {
|
|
@@ -42,7 +42,7 @@ import java.nio.file.Path;
|
|||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.Arrays;
|
||||
|
||||
public class PagedBytesReferenceTest extends ESTestCase {
|
||||
public class PagedBytesReferenceTests extends ESTestCase {
|
||||
|
||||
private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE;
|
||||
|
|
@@ -37,11 +37,11 @@ import java.util.concurrent.CountDownLatch;
|
|||
/**
|
||||
* Test streaming compression (e.g. used for recovery)
|
||||
*/
|
||||
public abstract class AbstractCompressedStreamTests extends ESTestCase {
|
||||
public abstract class AbstractCompressedStreamTestCase extends ESTestCase {
|
||||
|
||||
private final Compressor compressor;
|
||||
|
||||
protected AbstractCompressedStreamTests(Compressor compressor) {
|
||||
protected AbstractCompressedStreamTestCase(Compressor compressor) {
|
||||
this.compressor = compressor;
|
||||
}
|
||||
|
|
@@ -35,11 +35,11 @@ import static org.hamcrest.Matchers.not;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public abstract class AbstractCompressedXContentTests extends ESTestCase {
|
||||
public abstract class AbstractCompressedXContentTestCase extends ESTestCase {
|
||||
|
||||
private final Compressor compressor;
|
||||
|
||||
protected AbstractCompressedXContentTests(Compressor compressor) {
|
||||
protected AbstractCompressedXContentTestCase(Compressor compressor) {
|
||||
this.compressor = compressor;
|
||||
}
|
||||
|
|
@@ -19,9 +19,9 @@
|
|||
|
||||
package org.elasticsearch.common.compress.deflate;
|
||||
|
||||
import org.elasticsearch.common.compress.AbstractCompressedStreamTests;
|
||||
import org.elasticsearch.common.compress.AbstractCompressedStreamTestCase;
|
||||
|
||||
public class DeflateCompressedStreamTests extends AbstractCompressedStreamTests {
|
||||
public class DeflateCompressedStreamTests extends AbstractCompressedStreamTestCase {
|
||||
|
||||
public DeflateCompressedStreamTests() {
|
||||
super(new DeflateCompressor());
|
||||
|
|
|
@@ -19,9 +19,9 @@
|
|||
|
||||
package org.elasticsearch.common.compress.deflate;
|
||||
|
||||
import org.elasticsearch.common.compress.AbstractCompressedXContentTests;
|
||||
import org.elasticsearch.common.compress.AbstractCompressedXContentTestCase;
|
||||
|
||||
public class DeflateXContentTests extends AbstractCompressedXContentTests {
|
||||
public class DeflateXContentTests extends AbstractCompressedXContentTestCase {
|
||||
|
||||
public DeflateXContentTests() {
|
||||
super(new DeflateCompressor());
|
||||
|
|
|
@@ -19,9 +19,9 @@
|
|||
|
||||
package org.elasticsearch.common.compress.lzf;
|
||||
|
||||
import org.elasticsearch.common.compress.AbstractCompressedStreamTests;
|
||||
import org.elasticsearch.common.compress.AbstractCompressedStreamTestCase;
|
||||
|
||||
public class LZFCompressedStreamTests extends AbstractCompressedStreamTests {
|
||||
public class LZFCompressedStreamTests extends AbstractCompressedStreamTestCase {
|
||||
|
||||
public LZFCompressedStreamTests() {
|
||||
super(new LZFTestCompressor());
|
||||
|
|
|
@@ -19,9 +19,9 @@
|
|||
|
||||
package org.elasticsearch.common.compress.lzf;
|
||||
|
||||
import org.elasticsearch.common.compress.AbstractCompressedXContentTests;
|
||||
import org.elasticsearch.common.compress.AbstractCompressedXContentTestCase;
|
||||
|
||||
public class LZFXContentTests extends AbstractCompressedXContentTests {
|
||||
public class LZFXContentTests extends AbstractCompressedXContentTestCase {
|
||||
|
||||
public LZFXContentTests() {
|
||||
super(new LZFTestCompressor());
|
||||
|
|
|
@@ -37,7 +37,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public class LuceneTest extends ESTestCase {
|
||||
public class LuceneTests extends ESTestCase {
|
||||
|
||||
|
||||
/*
|
|
@@ -25,7 +25,7 @@ import org.junit.Test;
|
|||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class PropertyPlaceholderTest extends ESTestCase {
|
||||
public class PropertyPlaceholderTests extends ESTestCase {
|
||||
|
||||
@Test
|
||||
public void testSimple() {
|
|
@@ -26,7 +26,7 @@ import java.util.ArrayList;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public abstract class AbstractRecyclerTests extends ESTestCase {
|
||||
public abstract class AbstractRecyclerTestCase extends ESTestCase {
|
||||
|
||||
// marker states for data
|
||||
protected static final byte FRESH = 1;
|
|
@@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.recycler;
|
||||
|
||||
public class ConcurrentRecyclerTests extends AbstractRecyclerTests {
|
||||
public class ConcurrentRecyclerTests extends AbstractRecyclerTestCase {
|
||||
|
||||
@Override
|
||||
protected Recycler<byte[]> newRecycler(int limit) {
|
||||
|
|
|
@@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.recycler;
|
||||
|
||||
public class LockedRecyclerTests extends AbstractRecyclerTests {
|
||||
public class LockedRecyclerTests extends AbstractRecyclerTestCase {
|
||||
|
||||
@Override
|
||||
protected Recycler<byte[]> newRecycler(int limit) {
|
||||
|
|
|
@@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.recycler;
|
||||
|
||||
public class NoneRecyclerTests extends AbstractRecyclerTests {
|
||||
public class NoneRecyclerTests extends AbstractRecyclerTestCase {
|
||||
|
||||
@Override
|
||||
protected Recycler<byte[]> newRecycler(int limit) {
|
||||
|
|
|
@@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.common.recycler;
|
||||
|
||||
public class QueueRecyclerTests extends AbstractRecyclerTests {
|
||||
public class QueueRecyclerTests extends AbstractRecyclerTestCase {
|
||||
|
||||
@Override
|
||||
protected Recycler<byte[]> newRecycler(int limit) {
|
||||
|
|
|
@@ -25,7 +25,7 @@ import org.junit.Test;
|
|||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
public class CancellableThreadsTest extends ESTestCase {
|
||||
public class CancellableThreadsTests extends ESTestCase {
|
||||
|
||||
public static class CustomException extends RuntimeException {
|
||||
|
|
@ -30,7 +30,7 @@ import static org.hamcrest.Matchers.equalTo;
|
|||
import static org.hamcrest.Matchers.greaterThan;
|
||||
|
||||
|
||||
public class CountDownTest extends ESTestCase {
|
||||
public class CountDownTests extends ESTestCase {
|
||||
|
||||
@Test
|
||||
public void testConcurrent() throws InterruptedException {
|
|
@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.is;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class RefCountedTest extends ESTestCase {
|
||||
public class RefCountedTests extends ESTestCase {
|
||||
|
||||
@Test
|
||||
public void testRefCount() throws IOException {
|
|
@ -31,7 +31,7 @@ import static org.hamcrest.CoreMatchers.is;
|
|||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public abstract class AbstractFilteringJsonGeneratorTests extends ESTestCase {
|
||||
public abstract class AbstractFilteringJsonGeneratorTestCase extends ESTestCase {
|
||||
|
||||
protected abstract XContentType getXContentType();
|
||||
|
|
@@ -22,7 +22,7 @@ package org.elasticsearch.common.xcontent.support.filtering;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 
-public class JsonFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests {
+public class JsonFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTestCase {
 
     @Override
     protected XContentType getXContentType() {

@@ -22,7 +22,7 @@ package org.elasticsearch.common.xcontent.support.filtering;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 
-public class YamlFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTests {
+public class YamlFilteringGeneratorTests extends AbstractFilteringJsonGeneratorTestCase {
 
     @Override
     protected XContentType getXContentType() {

@@ -29,7 +29,7 @@ import org.junit.Test;
 
 import java.util.*;
 
-public class ElectMasterServiceTest extends ESTestCase {
+public class ElectMasterServiceTests extends ESTestCase {
 
     ElectMasterService electMasterService() {
         return new ElectMasterService(Settings.EMPTY, Version.CURRENT);
@@ -35,7 +35,7 @@ import static org.hamcrest.core.IsNull.nullValue;
 
 /**
  */
-public class ZenDiscoveryUnitTest extends ESTestCase {
+public class ZenDiscoveryUnitTests extends ESTestCase {
 
     public void testShouldIgnoreNewClusterState() {
         ClusterName clusterName = new ClusterName("abc");

@@ -68,7 +68,7 @@ import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.startsWith;
 
 @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS
-public class MetaDataStateFormatTest extends ESTestCase {
+public class MetaDataStateFormatTests extends ESTestCase {
 
 
     /**

@@ -349,7 +349,7 @@ public class MetaDataStateFormatTest extends ESTestCase {
             if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here....
                 Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st");
                 corruptedFiles.add(file);
-                MetaDataStateFormatTest.corruptFile(file, logger);
+                MetaDataStateFormatTests.corruptFile(file, logger);
             }
         }

@@ -377,7 +377,7 @@ public class MetaDataStateFormatTest extends ESTestCase {
             if (corruptedFiles.contains(file)) {
                 continue;
             }
-            MetaDataStateFormatTest.corruptFile(file, logger);
+            MetaDataStateFormatTests.corruptFile(file, logger);
         }
         try {
             format.loadLatestState(logger, dirList.toArray(new Path[0]));
@@ -64,7 +64,7 @@ import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 /**
  * This test just tests, if he pipelining works in general with out any connection the elasticsearch handler
  */
-public class NettyHttpServerPipeliningTest extends ESTestCase {
+public class NettyHttpServerPipeliningTests extends ESTestCase {
 
     private NetworkService networkService;
     private ThreadPool threadPool;

@@ -128,7 +128,7 @@ public class NettyHttpServerPipeliningTest extends ESTestCase {
         private final ExecutorService executorService;
 
         public CustomNettyHttpServerTransport(Settings settings) {
-            super(settings, NettyHttpServerPipeliningTest.this.networkService, NettyHttpServerPipeliningTest.this.bigArrays);
+            super(settings, NettyHttpServerPipeliningTests.this.networkService, NettyHttpServerPipeliningTests.this.bigArrays);
             this.executorService = Executors.newFixedThreadPool(5);
         }

@@ -53,7 +53,7 @@ import static org.jboss.netty.util.CharsetUtil.UTF_8;
 /**
  *
  */
-public class HttpPipeliningHandlerTest extends ESTestCase {
+public class HttpPipeliningHandlerTests extends ESTestCase {
 
     private static final long RESPONSE_TIMEOUT = 10000L;
     private static final long CONNECTION_TIMEOUT = 10000L;
@@ -31,7 +31,7 @@ import org.elasticsearch.test.ESTokenStreamTestCase;
 /**
  * Verifies the behavior of PatternAnalyzer.
  */
-public class PatternAnalyzerTest extends ESTokenStreamTestCase {
+public class PatternAnalyzerTests extends ESTokenStreamTestCase {
 
     /**
      * Test PatternAnalyzer when it is configured with a non-word pattern.

@@ -54,7 +54,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public class SynonymsAnalysisTest extends ESTestCase {
+public class SynonymsAnalysisTests extends ESTestCase {
 
     protected final ESLogger logger = Loggers.getLogger(getClass());
     private AnalysisService analysisService;

@@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public class BitSetFilterCacheTest extends ESTestCase {
+public class BitSetFilterCacheTests extends ESTestCase {
 
     @Test
     public void testInvalidateEntries() throws Exception {
@@ -34,7 +34,7 @@ import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
 })
 @TimeoutSuite(millis = TimeUnits.HOUR)
 @LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
-public class PostingsFormatTest extends BasePostingsFormatTestCase {
+public class PostingsFormatTests extends BasePostingsFormatTestCase {
 
     @Override
     protected Codec getCodec() {

@@ -28,7 +28,7 @@ import java.util.concurrent.TimeUnit;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 
-public class InternalEngineSettingsTest extends ESSingleNodeTestCase {
+public class InternalEngineSettingsTests extends ESSingleNodeTestCase {
 
     public void testSettingsUpdate() {
         final IndexService service = createIndex("foo");

@@ -24,13 +24,12 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.search.*;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
 import org.elasticsearch.search.MultiValueMode;
 import org.junit.Test;
 
 import static org.hamcrest.Matchers.*;
 
-public abstract class AbstractFieldDataImplTests extends AbstractFieldDataTests {
+public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTestCase {
 
     protected String one() {
         return "1";
@@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.sameInstance;
 
-public abstract class AbstractFieldDataTests extends ESSingleNodeTestCase {
+public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
 
     protected IndexService indexService;
     protected IndexFieldDataService ifdService;

@@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public abstract class AbstractNumericFieldDataTests extends AbstractFieldDataImplTests {
+public abstract class AbstractNumericFieldDataTestCase extends AbstractFieldDataImplTestCase {
 
     @Override
     protected abstract FieldDataType getFieldDataType();

@@ -70,7 +70,7 @@ import static org.hamcrest.Matchers.sameInstance;
 
 /**
  */
-public abstract class AbstractStringFieldDataTests extends AbstractFieldDataImplTests {
+public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataImplTestCase {
 
     private void addField(Document d, String name, String value) {
         d.add(new StringField(name, value, Field.Store.YES));
@@ -35,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo;
 /**
  *
  */
-public class BinaryDVFieldDataTests extends AbstractFieldDataTests {
+public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase {
 
     @Override
     protected boolean hasDocValues() {

@@ -27,7 +27,7 @@ import org.apache.lucene.index.Term;
 
 /**
  */
-public class DoubleFieldDataTests extends AbstractNumericFieldDataTests {
+public class DoubleFieldDataTests extends AbstractNumericFieldDataTestCase {
 
     @Override
     protected FieldDataType getFieldDataType() {

@@ -56,7 +56,7 @@ import java.util.Set;
 
 import static org.hamcrest.Matchers.*;
 
-public class DuelFieldDataTests extends AbstractFieldDataTests {
+public class DuelFieldDataTests extends AbstractFieldDataTestCase {
 
     @Override
     protected FieldDataType getFieldDataType() {
|
@ -30,7 +30,7 @@ import java.util.Random;
|
|||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class FilterFieldDataTest extends AbstractFieldDataTests {
|
||||
public class FilterFieldDataTests extends AbstractFieldDataTestCase {
|
||||
|
||||
@Override
|
||||
protected FieldDataType getFieldDataType() {
|
|
@ -26,7 +26,7 @@ import org.apache.lucene.index.Term;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class FloatFieldDataTests extends AbstractNumericFieldDataTests {
|
||||
public class FloatFieldDataTests extends AbstractNumericFieldDataTestCase {
|
||||
|
||||
@Override
|
||||
protected FieldDataType getFieldDataType() {
|
||||
|
|
|
@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan;
|
|||
/**
|
||||
* Tests for all integer types (byte, short, int, long).
|
||||
*/
|
||||
public class LongFieldDataTests extends AbstractNumericFieldDataTests {
|
||||
public class LongFieldDataTests extends AbstractNumericFieldDataTestCase {
|
||||
|
||||
@Override
|
||||
protected FieldDataType getFieldDataType() {
|
||||
|
|
|
@ -24,7 +24,7 @@ import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class PagedBytesStringFieldDataTests extends AbstractStringFieldDataTests {
|
||||
public class PagedBytesStringFieldDataTests extends AbstractStringFieldDataTestCase {
|
||||
|
||||
@Override
|
||||
protected FieldDataType getFieldDataType() {
|
||||
|
|
|
@ -30,7 +30,6 @@ import org.apache.lucene.search.*;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
|
||||
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
|
@ -49,7 +48,7 @@ import static org.hamcrest.Matchers.nullValue;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class ParentChildFieldDataTests extends AbstractFieldDataTests {
|
||||
public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
|
||||
|
||||
private final String parentType = "parent";
|
||||
private final String childType = "child";
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.elasticsearch.index.fielddata;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
|
||||
|
||||
public class SortedSetDVStringFieldDataTests extends AbstractStringFieldDataTests {
|
||||
public class SortedSetDVStringFieldDataTests extends AbstractStringFieldDataTestCase {
|
||||
|
||||
@Override
|
||||
protected FieldDataType getFieldDataType() {
|
||||
|
|
|
@@ -33,7 +33,7 @@ import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.hasToString;
 
-public class MapperServiceTest extends ESSingleNodeTestCase {
+public class MapperServiceTests extends ESSingleNodeTestCase {
     @Rule
     public ExpectedException expectedException = ExpectedException.none();

@@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.equalTo;
 /**
  *
  */
-public class DoubleIndexingDocTest extends ESSingleNodeTestCase {
+public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
 
     @Test
     public void testDoubleIndexingSameDoc() throws Exception {

@@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo;
 /**
  *
  */
-public class StoredNumericValuesTest extends ESSingleNodeTestCase {
+public class StoredNumericValuesTests extends ESSingleNodeTestCase {
 
     @Test
     public void testBytesAndNumericRepresentation() throws Exception {

@@ -25,7 +25,7 @@ import org.junit.Test;
 
 import java.io.IOException;
 
-public class CommonTermsQueryParserTest extends ESSingleNodeTestCase {
+public class CommonTermsQueryParserTests extends ESSingleNodeTestCase {
     @Test
     public void testWhenParsedQueryIsNullNoNullPointerExceptionIsThrown() throws IOException {
         final String index = "test-index";
@@ -32,7 +32,7 @@ import java.util.Map;
 /**
  * Test building and serialising a template search request.
  * */
-public class TemplateQueryBuilderTest extends ESTestCase {
+public class TemplateQueryBuilderTests extends ESTestCase {
 
     @Test
     public void testJSONGeneration() throws IOException {

@@ -59,7 +59,7 @@ import java.io.IOException;
  * Test parsing and executing a template request.
  */
 // NOTE: this can't be migrated to ESSingleNodeTestCase because of the custom path.conf
-public class TemplateQueryParserTest extends ESTestCase {
+public class TemplateQueryParserTests extends ESTestCase {
 
     private Injector injector;
     private QueryShardContext context;

@@ -40,7 +40,7 @@ import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.join.ToParentBlockJoinQuery;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.fielddata.AbstractFieldDataTests;
+import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
@@ -55,7 +55,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public abstract class AbstractNumberNestedSortingTests extends AbstractFieldDataTests {
+public abstract class AbstractNumberNestedSortingTestCase extends AbstractFieldDataTestCase {
 
     @Test
     public void testNestedSorting() throws Exception {

@@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public class DoubleNestedSortingTests extends AbstractNumberNestedSortingTests {
+public class DoubleNestedSortingTests extends AbstractNumberNestedSortingTestCase {
 
     @Override
     protected FieldDataType getFieldDataType() {

@@ -30,7 +30,7 @@ import org.elasticsearch.search.MultiValueMode;
 
 /**
  */
-public class LongNestedSortingTests extends AbstractNumberNestedSortingTests {
+public class LongNestedSortingTests extends AbstractNumberNestedSortingTestCase {
 
     @Override
     protected FieldDataType getFieldDataType() {

@@ -46,7 +46,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.fielddata.AbstractFieldDataTests;
+import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;

@@ -65,7 +65,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public class NestedSortingTests extends AbstractFieldDataTests {
+public class NestedSortingTests extends AbstractFieldDataTestCase {
 
     @Override
     protected FieldDataType getFieldDataType() {
@@ -18,7 +18,10 @@
  */
 package org.elasticsearch.index.shard;
 
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.IOUtils;

@@ -38,6 +41,7 @@ import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.logging.ESLogger;

@@ -49,6 +53,12 @@ import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.env.ShardLock;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.indexing.IndexingOperationListener;
+import org.elasticsearch.index.indexing.ShardIndexingService;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.ParseContext;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.QueryParsingException;
 import org.elasticsearch.index.settings.IndexSettingsService;
 import org.elasticsearch.index.store.Store;

@@ -64,9 +74,12 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import static org.elasticsearch.cluster.metadata.IndexMetaData.EMPTY_PARAMS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
@@ -584,4 +597,93 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         assertTrue(xContent.contains(expectedSubSequence));
     }
 
+    private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) {
+        Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
+        Field versionField = new NumericDocValuesField("_version", 0);
+        document.add(uidField);
+        document.add(versionField);
+        return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
+    }
+
+    public void testPreIndex() throws IOException {
+        createIndex("testpreindex");
+        ensureGreen();
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService("testpreindex");
+        IndexShard shard = test.shard(0);
+        ShardIndexingService shardIndexingService = shard.indexingService();
+        final AtomicBoolean preIndexCalled = new AtomicBoolean(false);
+
+        shardIndexingService.addListener(new IndexingOperationListener() {
+            @Override
+            public Engine.Index preIndex(Engine.Index index) {
+                preIndexCalled.set(true);
+                return super.preIndex(index);
+            }
+        });
+
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null);
+        Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc);
+        shard.index(index);
+        assertTrue(preIndexCalled.get());
+    }
+
+    public void testPostIndex() throws IOException {
+        createIndex("testpostindex");
+        ensureGreen();
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService("testpostindex");
+        IndexShard shard = test.shard(0);
+        ShardIndexingService shardIndexingService = shard.indexingService();
+        final AtomicBoolean postIndexCalled = new AtomicBoolean(false);
+
+        shardIndexingService.addListener(new IndexingOperationListener() {
+            @Override
+            public void postIndex(Engine.Index index) {
+                postIndexCalled.set(true);
+                super.postIndex(index);
+            }
+        });
+
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null);
+        Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc);
+        shard.index(index);
+        assertTrue(postIndexCalled.get());
+    }
+
+    public void testPostIndexWithException() throws IOException {
+        createIndex("testpostindexwithexception");
+        ensureGreen();
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService("testpostindexwithexception");
+        IndexShard shard = test.shard(0);
+        ShardIndexingService shardIndexingService = shard.indexingService();
+
+        shard.close("Unexpected close", true);
+        shard.state = IndexShardState.STARTED; // It will generate exception
+
+        final AtomicBoolean postIndexWithExceptionCalled = new AtomicBoolean(false);
+
+        shardIndexingService.addListener(new IndexingOperationListener() {
+            @Override
+            public void postIndex(Engine.Index index, Throwable ex) {
+                assertNotNull(ex);
+                postIndexWithExceptionCalled.set(true);
+                super.postIndex(index, ex);
+            }
+        });
+
+        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null);
+        Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc);
+
+        try {
+            shard.index(index);
+            fail();
+        } catch (IllegalIndexShardStateException e) {
+
+        }
+
+        assertTrue(postIndexWithExceptionCalled.get());
+    }
+
 }
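The tests above register an anonymous IndexingOperationListener on the shard's ShardIndexingService and verify that the pre/post hooks fire, including the new postIndex(Engine.Index, Throwable) failure hook. As a hedged illustration of the same API (not part of this commit; the class name and counter fields below are hypothetical), a standalone listener built on the hooks shown in the diff might look like this:

import java.util.concurrent.atomic.AtomicLong;

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.indexing.IndexingOperationListener;

// Hypothetical listener that tallies successful and failed index operations
// using the preIndex/postIndex hooks exercised by the tests above.
public class FailureCountingIndexingListener extends IndexingOperationListener {

    private final AtomicLong indexed = new AtomicLong();
    private final AtomicLong failed = new AtomicLong();

    @Override
    public Engine.Index preIndex(Engine.Index index) {
        // nothing to change before the operation; mirrors the hook used in testPreIndex()
        return super.preIndex(index);
    }

    @Override
    public void postIndex(Engine.Index index) {
        // the operation completed without throwing
        indexed.incrementAndGet();
        super.postIndex(index);
    }

    @Override
    public void postIndex(Engine.Index index, Throwable ex) {
        // the operation threw; ex carries the failure
        failed.incrementAndGet();
        super.postIndex(index, ex);
    }

    public long indexedCount() {
        return indexed.get();
    }

    public long failedCount() {
        return failed.get();
    }
}

Such a listener would be registered the same way the tests do it, via shardIndexingService.addListener(new FailureCountingIndexingListener()).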
@@ -33,7 +33,7 @@ import java.io.IOException;
 import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.hamcrest.Matchers.equalTo;
 
-public class MergePolicySettingsTest extends ESTestCase {
+public class MergePolicySettingsTests extends ESTestCase {
 
     protected final ShardId shardId = new ShardId(new Index("index"), 1);

@@ -58,7 +58,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 
 /** Separate test class from ShardPathTests because we need static (BeforeClass) setup to install mock filesystems... */
 @SuppressForbidden(reason = "ProviderMismatchException if I try to use PathUtils.getDefault instead")
-public class NewPathForShardTest extends ESTestCase {
+public class NewPathForShardTests extends ESTestCase {
 
     // Sneakiness to install mock file stores so we can pretend how much free space we have on each path.data:
     private static MockFileStore aFileStore = new MockFileStore("mocka");

@@ -39,7 +39,7 @@ import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
 import org.elasticsearch.test.ESTestCase;
 
 /** Tests upgrading old document versions from _uid payloads to _version docvalues */
-public class VersionFieldUpgraderTest extends ESTestCase {
+public class VersionFieldUpgraderTests extends ESTestCase {
 
     /** Simple test: one doc in the old format, check that it looks correct */
     public void testUpgradeOneDocument() throws Exception {

@@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.is;
 
 /**
  */
-public class FileInfoTest extends ESTestCase {
+public class FileInfoTests extends ESTestCase {
 
     @Test
     public void testToFromXContent() throws IOException {
@@ -27,7 +27,7 @@ import java.util.Random;
 
 import static org.hamcrest.Matchers.equalTo;
 
-public class SlicedInputStreamTest extends ESTestCase {
+public class SlicedInputStreamTests extends ESTestCase {
 
     @Test
     public void readRandom() throws IOException {

@@ -29,7 +29,7 @@ import java.util.Set;
 
 import static org.hamcrest.CoreMatchers.*;
 
-public class DirectoryUtilsTest extends ESTestCase {
+public class DirectoryUtilsTests extends ESTestCase {
 
     @Test
     public void testGetLeave() throws IOException {

@@ -37,7 +37,7 @@ import java.util.Locale;
 
 /**
  */
-public class IndexStoreBWCTest extends ESSingleNodeTestCase {
+public class IndexStoreBWCTests extends ESSingleNodeTestCase {
 
 
     public void testOldCoreTypesFail() {

@@ -65,7 +65,7 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.Matchers.*;
 
-public class StoreTest extends ESTestCase {
+public class StoreTests extends ESTestCase {
 
     @Test
     public void testRefCount() throws IOException {
@@ -693,7 +693,7 @@ public class StoreTest extends ESTestCase {
 
         public LuceneManagedDirectoryService(Random random, boolean preventDoubleWrite) {
             super(new ShardId("fake", 1), Settings.EMPTY);
-            dir = StoreTest.newDirectory(random);
+            dir = StoreTests.newDirectory(random);
             if (dir instanceof MockDirectoryWrapper) {
                 ((MockDirectoryWrapper) dir).setPreventDoubleWrite(preventDoubleWrite);
                 // TODO: fix this test to handle virus checker

@@ -37,7 +37,7 @@ import java.util.concurrent.TimeUnit;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 
-public class IndicesServiceTest extends ESSingleNodeTestCase {
+public class IndicesServiceTests extends ESSingleNodeTestCase {
 
     public IndicesService getIndicesService() {
         return getInstanceFromNode(IndicesService.class);

@@ -36,7 +36,7 @@ import java.util.Map;
 
 /**
  */
-public class SyncedFlushSingleNodeTest extends ESSingleNodeTestCase {
+public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase {
 
     public void testModificationPreventsFlushing() throws InterruptedException {
         createIndex("test");

@@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.hamcrest.Matchers.*;
 
-public class RecoveryStateTest extends ESTestCase {
+public class RecoveryStateTests extends ESTestCase {
 
     abstract class Streamer<T extends Streamable> extends Thread {
@@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.nullValue;
 
 /**
  */
-public class StartRecoveryRequestTest extends ESTestCase {
+public class StartRecoveryRequestTests extends ESTestCase {
 
     @Test
     public void testSerialization() throws Exception {

@@ -26,7 +26,7 @@ import org.junit.Test;
 
 import java.util.concurrent.TimeUnit;
 
-public class RecoverySettingsTest extends ESSingleNodeTestCase {
+public class RecoverySettingsTests extends ESSingleNodeTestCase {
 
     @Override
     protected boolean resetNodeAfterTest() {

@@ -31,7 +31,7 @@ import java.util.List;
 import static org.elasticsearch.rest.action.support.RestTable.buildDisplayHeaders;
 import static org.hamcrest.Matchers.*;
 
-public class RestTableTest extends ESTestCase {
+public class RestTableTests extends ESTestCase {
 
     private Table table = new Table();
     private FakeRestRequest restRequest = new FakeRestRequest();

@@ -38,7 +38,7 @@ import java.util.*;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.nullValue;
 
-public class ScriptParameterParserTest extends ESTestCase {
+public class ScriptParameterParserTests extends ESTestCase {
 
     @Test
     public void testTokenDefaultInline() throws IOException {
@@ -37,7 +37,7 @@ import static org.hamcrest.Matchers.equalTo;
 /**
  * Mustache based templating test
  */
-public class MustacheScriptEngineTest extends ESTestCase {
+public class MustacheScriptEngineTests extends ESTestCase {
     private MustacheScriptEngineService qe;
     private JsonEscapingMustacheFactory escaper;

@@ -31,7 +31,7 @@ import java.util.HashMap;
 /**
  * Figure out how Mustache works for the simplest use case. Leaving in here for now for reference.
  * */
-public class MustacheTest extends ESTestCase {
+public class MustacheTests extends ESTestCase {
 
     @Test
     public void test() {

@@ -55,7 +55,7 @@ import static org.hamcrest.Matchers.equalTo;
 
 /**
  */
-public class NestedAggregatorTest extends ESSingleNodeTestCase {
+public class NestedAggregatorTests extends ESSingleNodeTestCase {
 
     @Test
     public void testResetRootDocId() throws Exception {
Some files were not shown because too many files have changed in this diff.