fold ESTestCase into ElasticsearchTestCase

Robert Muir 2015-04-17 20:35:28 -04:00
parent 84811a57d6
commit aa381a2775
30 changed files with 511 additions and 553 deletions
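The change is a mechanical fold: the short-lived ESTestCase base class is merged back into ElasticsearchTestCase, every test that extended ESTestCase now extends ElasticsearchTestCase, and the randomized-testing annotations and random-value helpers move across unchanged. A minimal sketch of what a downstream unit test looks like after this commit (illustrative only; the class name ExampleUnitTests is hypothetical, while the inherited helpers are the ones carried over in the diff below):

import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;

// Hypothetical example, not part of this commit: before the fold this class
// declared "extends ESTestCase"; after it, only the base class name changes.
public class ExampleUnitTests extends ElasticsearchTestCase {

    @Test
    public void testInheritedRandomHelpers() {
        // scaledRandomIntBetween and randomIntBetween keep their signatures,
        // they simply live on ElasticsearchTestCase now.
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            assertTrue(randomIntBetween(0, 5) <= 5);
        }
    }
}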

View File

@@ -1,3 +0,0 @@
mvn test -Pdev -Dtests.seed=3BE26A0D85E40D93 -Dtests.class=org.elasticsearch.indices.state.OpenCloseIndexTests -Dtests.method="testOpenCloseWithDocs" -Des.logger.level=INFO -Dtests.heap.size=512m -Dtests.timezone=Atlantic/South_Georgia -Dtests.processors=8
mvn test -Pdev -Dtests.seed=3BE26A0D85E40D93 -Dtests.class=org.elasticsearch.indices.template.IndexTemplateFileLoadingTests -Dtests.method="testThatLoadingTemplateFromFileWorks" -Des.logger.level=INFO -Dtests.heap.size=512m -Dtests.locale=hu_HU -Dtests.timezone=Africa/Harare -Dtests.processors=8

View File

@@ -41,7 +41,7 @@ import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
-public class BlendedTermQueryTest extends ESTestCase {
+public class BlendedTermQueryTest extends ElasticsearchTestCase {
@Test
public void testBooleanQuery() throws IOException {

View File

@@ -30,7 +30,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.search.highlight.HighlightUtils;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@@ -39,7 +39,7 @@ import java.util.*;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
-public class CustomPostingsHighlighterTests extends ESTestCase {
+public class CustomPostingsHighlighterTests extends ElasticsearchTestCase {
@Test
public void testDiscreteHighlightingPerValue() throws Exception {

View File

@@ -28,7 +28,7 @@ import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.BufferedReader;
@@ -41,7 +41,7 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.*;
-public class XPostingsHighlighterTests extends ESTestCase {
+public class XPostingsHighlighterTests extends ElasticsearchTestCase {
/*
Tests changes needed to make possible to perform discrete highlighting.

View File

@@ -25,7 +25,7 @@ import junit.framework.TestCase;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTokenStreamTestCase;
import org.junit.Ignore;
@@ -104,7 +104,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
}
private boolean isTestCase(Class<?> clazz) {
-return ElasticsearchTestCase.class.isAssignableFrom(clazz) || ESTestCase.class.isAssignableFrom(clazz) || ElasticsearchTokenStreamTestCase.class.isAssignableFrom(clazz) || LuceneTestCase.class.isAssignableFrom(clazz);
+return ElasticsearchTestCase.class.isAssignableFrom(clazz) || ElasticsearchTestCase.class.isAssignableFrom(clazz) || ElasticsearchTokenStreamTestCase.class.isAssignableFrom(clazz) || LuceneTestCase.class.isAssignableFrom(clazz);
}
private Class<?> loadClass(String filename) throws ClassNotFoundException {
@@ -138,7 +138,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
String classesToSubclass = Joiner.on(',').join(
ElasticsearchTestCase.class.getSimpleName(),
-ESTestCase.class.getSimpleName(),
+ElasticsearchTestCase.class.getSimpleName(),
ElasticsearchTokenStreamTestCase.class.getSimpleName(),
LuceneTestCase.class.getSimpleName());
assertTrue("Not all subclasses of " + ElasticsearchTestCase.class.getSimpleName() +
@@ -161,7 +161,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
public static final class WrongName extends ElasticsearchTestCase {}
-public static final class WrongNameTheSecond extends ESTestCase {}
+public static final class WrongNameTheSecond extends ElasticsearchTestCase {}
public static final class PlainUnit extends TestCase {}

View File

@@ -42,7 +42,7 @@ import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.Matchers;
import org.junit.Test;
@@ -55,7 +55,7 @@ import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
-public class TermVectorsUnitTests extends ESTestCase {
+public class TermVectorsUnitTests extends ElasticsearchTestCase {
@Test
public void streamResponse() throws Exception {

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Version;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@@ -36,7 +36,7 @@ import java.util.Set;
/**
*
*/
-public class LuceneTest extends ESTestCase {
+public class LuceneTest extends ElasticsearchTestCase {
/*

View File

@@ -31,10 +31,10 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
/** Simple tests for this filterreader */
-public class ElasticsearchDirectoryReaderTests extends ESTestCase {
+public class ElasticsearchDirectoryReaderTests extends ElasticsearchTestCase {
/** Test that core cache key (needed for NRT) is working */
public void testCoreCacheKey() throws Exception {

View File

@@ -36,7 +36,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.is;
/**
*/
-public class FreqTermsEnumTests extends ESTestCase {
+public class FreqTermsEnumTests extends ElasticsearchTestCase {
private String[] terms;
private IndexWriter iw;

View File

@@ -28,9 +28,9 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
-public class AndDocIdSetTests extends ESTestCase {
+public class AndDocIdSetTests extends ElasticsearchTestCase {
private static FixedBitSet randomBitSet(int numDocs) {
FixedBitSet b = new FixedBitSet(numDocs);

View File

@@ -34,7 +34,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -49,7 +49,7 @@ import static org.hamcrest.core.IsEqual.equalTo;
/**
*/
-public class XBooleanFilterTests extends ESTestCase {
+public class XBooleanFilterTests extends ElasticsearchTestCase {
private Directory directory;
private LeafReader reader;

View File

@@ -35,7 +35,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.merge.policy.ElasticsearchMergePolicy;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.MatcherAssert;
import org.junit.Test;
@@ -46,7 +46,7 @@ import java.util.Map;
import static org.hamcrest.Matchers.*;
-public class VersionsTests extends ESTestCase {
+public class VersionsTests extends ElasticsearchTestCase {
public static DirectoryReader reopen(DirectoryReader reader) throws IOException {
return reopen(reader, true);

View File

@@ -90,7 +90,7 @@ import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogSizeMatcher;
import org.elasticsearch.index.translog.fs.FsTranslog;
import org.elasticsearch.test.DummyShardLock;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.MatcherAssert;
import org.junit.After;
@@ -118,7 +118,7 @@ import static org.hamcrest.Matchers.nullValue;
// TODO: this guy isn't ready for mock filesystems yet
@SuppressFileSystems("*")
-public class InternalEngineTests extends ESTestCase {
+public class InternalEngineTests extends ElasticsearchTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1);

View File

@@ -63,7 +63,7 @@ import org.elasticsearch.index.store.distributor.LeastUsedDistributor;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.fs.FsTranslog;
import org.elasticsearch.test.DummyShardLock;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.MatcherAssert;
import org.junit.After;
@@ -85,7 +85,7 @@ import static org.hamcrest.Matchers.nullValue;
/**
* TODO: document me!
*/
-public class ShadowEngineTests extends ESTestCase {
+public class ShadowEngineTests extends ElasticsearchTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1);

View File

@@ -24,9 +24,9 @@ import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
-public class ReplaceMissingTests extends ESTestCase {
+public class ReplaceMissingTests extends ElasticsearchTestCase {
public void test() throws Exception {
Directory dir = newDirectory();

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.util.Locale;
@@ -37,7 +37,7 @@ import static org.hamcrest.core.IsNull.notNullValue;
/**
*/
-public class ParentChildFilteredTermsEnumTests extends ESTestCase {
+public class ParentChildFilteredTermsEnumTests extends ElasticsearchTestCase {
@Test
public void testSimple_twoFieldEachUniqueValue() throws Exception {

View File

@@ -36,10 +36,10 @@ import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
/** Tests upgrading old document versions from _uid payloads to _version docvalues */
-public class VersionFieldUpgraderTest extends ESTestCase {
+public class VersionFieldUpgraderTest extends ElasticsearchTestCase {
/** Simple test: one doc in the old format, check that it looks correct */
public void testUpgradeOneDocument() throws Exception {

View File

@@ -25,11 +25,11 @@ import org.apache.lucene.index.*;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import java.io.IOException;
-public class ShardUtilsTests extends ESTestCase {
+public class ShardUtilsTests extends ElasticsearchTestCase {
public void testExtractShardId() throws IOException {
BaseDirectoryWrapper dir = newDirectory();

View File

@@ -19,7 +19,7 @@
package org.elasticsearch.index.store;
import org.apache.lucene.store.*;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@@ -29,7 +29,7 @@ import java.util.Set;
import static org.hamcrest.CoreMatchers.*;
-public class DirectoryUtilsTest extends ESTestCase {
+public class DirectoryUtilsTest extends ElasticsearchTestCase {
@Test
public void testGetLeave() throws IOException {

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.Directory;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
/**
* Simple tests for LegacyVerification (old segments)
@@ -34,7 +34,7 @@ import org.elasticsearch.test.ESTestCase;
* segments is not longer needed.
*/
@Deprecated
-public class LegacyVerificationTests extends ESTestCase {
+public class LegacyVerificationTests extends ElasticsearchTestCase {
public void testAdler32() throws Exception {
Adler32 expected = new Adler32();

View File

@@ -42,7 +42,7 @@ import org.elasticsearch.index.store.distributor.Distributor;
import org.elasticsearch.index.store.distributor.LeastUsedDistributor;
import org.elasticsearch.index.store.distributor.RandomWeightedDistributor;
import org.elasticsearch.test.DummyShardLock;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.Matchers;
import org.junit.Test;
@@ -57,7 +57,7 @@ import java.util.zip.Adler32;
import static com.carrotsearch.randomizedtesting.RandomizedTest.*;
import static org.hamcrest.Matchers.*;
-public class StoreTest extends ESTestCase {
+public class StoreTest extends ElasticsearchTestCase {
@Test
public void testRefCount() throws IOException {

View File

@@ -31,7 +31,7 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@@ -39,7 +39,7 @@ import java.io.StringReader;
import java.util.Arrays;
import java.util.List;
-public class XMoreLikeThisTests extends ESTestCase {
+public class XMoreLikeThisTests extends ElasticsearchTestCase {
private void addDoc(RandomIndexWriter writer, String[] texts) throws IOException {
Document doc = new Document();

View File

@@ -34,7 +34,7 @@ import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenFilter;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.util.ArrayList;
@@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
-public class NestedChildrenFilterTest extends ESTestCase {
+public class NestedChildrenFilterTest extends ElasticsearchTestCase {
@Test
public void testNestedChildrenFilter() throws Exception {

View File

@@ -1,488 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.SysGlobals;
import com.carrotsearch.randomizedtesting.annotations.Listeners;
import com.carrotsearch.randomizedtesting.annotations.TestGroup;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.test.junit.listeners.LoggingListener;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import java.io.Closeable;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.TimeZone;
/**
* The new base test class, with all the goodies
*/
@Listeners({
ReproduceInfoPrinter.class,
LoggingListener.class
})
@ThreadLeakScope(Scope.SUITE)
@ThreadLeakLingering(linger = 5000) // 5 sec lingering
@TimeoutSuite(millis = 20 * TimeUnits.MINUTE)
@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
@Ignore
@SuppressCodecs({"SimpleText", "Memory", "CheapBastard", "Direct"}) // slow ones
@LuceneTestCase.SuppressReproduceLine
public abstract class ESTestCase extends LuceneTestCase {
static {
SecurityHack.ensureInitialized();
}
// setup mock filesystems for this test run. we change PathUtils
// so that all accesses are plumbed thru any mock wrappers
@BeforeClass
public static void setUpFileSystem() {
try {
Field field = PathUtils.class.getDeclaredField("DEFAULT");
field.setAccessible(true);
field.set(null, LuceneTestCase.getBaseTempDirForTestClass().getFileSystem());
} catch (ReflectiveOperationException e) {
throw new RuntimeException();
}
}
@AfterClass
public static void restoreFileSystem() {
try {
Field field1 = PathUtils.class.getDeclaredField("ACTUAL_DEFAULT");
field1.setAccessible(true);
Field field2 = PathUtils.class.getDeclaredField("DEFAULT");
field2.setAccessible(true);
field2.set(null, field1.get(null));
} catch (ReflectiveOperationException e) {
throw new RuntimeException();
}
}
@BeforeClass
public static void setUpProcessors() {
int numCpu = TestUtil.nextInt(random(), 1, 4);
System.setProperty(EsExecutors.DEFAULT_SYSPROP, Integer.toString(numCpu));
assertEquals(numCpu, EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY));
}
@AfterClass
public static void restoreProcessors() {
System.clearProperty(EsExecutors.DEFAULT_SYSPROP);
}
@Before
public void disableQueryCache() {
// TODO: Parent/child and other things does not work with the query cache
IndexSearcher.setDefaultQueryCache(null);
}
@After
public void ensureNoFieldCacheUse() {
// field cache should NEVER get loaded.
String[] entries = UninvertingReader.getUninvertedStats();
assertEquals("fieldcache must never be used, got=" + Arrays.toString(entries), 0, entries.length);
}
// old shit:
/**
* The number of concurrent JVMs used to run the tests, Default is <tt>1</tt>
*/
public static final int CHILD_JVM_COUNT = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_COUNT, "1"));
/**
* The child JVM ordinal of this JVM. Default is <tt>0</tt>
*/
public static final int CHILD_JVM_ID = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0"));
/**
* Annotation for backwards compat tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = false, sysProperty = TESTS_BACKWARDS_COMPATIBILITY)
public @interface Backwards {
}
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY = "tests.bwc";
public static final String TESTS_BACKWARDS_COMPATIBILITY_VERSION = "tests.bwc.version";
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY_PATH}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY_PATH = "tests.bwc.path";
/**
* Annotation for REST tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = TESTS_REST)
public @interface Rest {
}
/**
* Property that allows to control whether the REST tests are run (default) or not
*/
public static final String TESTS_REST = "tests.rest";
/**
* Annotation for integration tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION)
public @interface Integration {
}
// --------------------------------------------------------------------
// Test groups, system properties and other annotations modifying tests
// --------------------------------------------------------------------
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_FAILFAST = "tests.failfast";
public static final String SYSPROP_INTEGRATION = "tests.integration";
// -----------------------------------------------------------------
// Suite and test case setup/ cleanup.
// -----------------------------------------------------------------
/** MockFSDirectoryService sets this: */
public static boolean checkIndexFailed;
/**
* For subclasses to override. Overrides must call {@code super.setUp()}.
*/
@Override
public void setUp() throws Exception {
super.setUp();
checkIndexFailed = false;
}
/**
* For subclasses to override. Overrides must call {@code super.tearDown()}.
*/
@After
public void tearDown() throws Exception {
assertFalse("at least one shard failed CheckIndex", checkIndexFailed);
super.tearDown();
}
// -----------------------------------------------------------------
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------
/**
* Registers a {@link Closeable} resource that should be closed after the test
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
@Override
public <T extends Closeable> T closeAfterTest(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.TEST);
}
/**
* Registers a {@link Closeable} resource that should be closed after the suite
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
public static <T extends Closeable> T closeAfterSuite(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.SUITE);
}
// old helper stuff, a lot of it is bad news and we should see if its all used
/**
* Returns a "scaled" random number between min and max (inclusive). The number of
* iterations will fall between [min, max], but the selection will also try to
* achieve the points below:
* <ul>
* <li>the multiplier can be used to move the number of iterations closer to min
* (if it is smaller than 1) or closer to max (if it is larger than 1). Setting
* the multiplier to 0 will always result in picking min.</li>
* <li>on normal runs, the number will be closer to min than to max.</li>
* <li>on nightly runs, the number will be closer to max than to min.</li>
* </ul>
*
* @see #multiplier()
*
* @param min Minimum (inclusive).
* @param max Maximum (inclusive).
* @return Returns a random number between min and max.
*/
public static int scaledRandomIntBetween(int min, int max) {
if (min < 0) throw new IllegalArgumentException("min must be >= 0: " + min);
if (min > max) throw new IllegalArgumentException("max must be >= min: " + min + ", " + max);
double point = Math.min(1, Math.abs(random().nextGaussian()) * 0.3) * RANDOM_MULTIPLIER;
double range = max - min;
int scaled = (int) Math.round(Math.min(point * range, range));
if (isNightly()) {
return max - scaled;
} else {
return min + scaled;
}
}
/**
* A random integer from <code>min</code> to <code>max</code> (inclusive).
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int randomIntBetween(int min, int max) {
return RandomInts.randomIntBetween(random(), min, max);
}
/**
* Returns a "scaled" number of iterations for loops which can have a variable
* iteration count. This method is effectively
* an alias to {@link #scaledRandomIntBetween(int, int)}.
*/
public static int iterations(int min, int max) {
return scaledRandomIntBetween(min, max);
}
/**
* An alias for {@link #randomIntBetween(int, int)}.
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int between(int min, int max) {
return randomIntBetween(min, max);
}
/**
* The exact opposite of {@link #rarely()}.
*/
public static boolean frequently() {
return !rarely();
}
public static boolean randomBoolean() {
return random().nextBoolean();
}
public static byte randomByte() { return (byte) getRandom().nextInt(); }
public static short randomShort() { return (short) getRandom().nextInt(); }
public static int randomInt() { return getRandom().nextInt(); }
public static float randomFloat() { return getRandom().nextFloat(); }
public static double randomDouble() { return getRandom().nextDouble(); }
public static long randomLong() { return getRandom().nextLong(); }
/**
* Making {@link Assume#assumeNotNull(Object...)} directly available.
*/
public static void assumeNotNull(Object... objects) {
Assume.assumeNotNull(objects);
}
/**
* Pick a random object from the given array. The array must not be empty.
*/
public static <T> T randomFrom(T... array) {
return RandomPicks.randomFrom(random(), array);
}
/**
* Pick a random object from the given list.
*/
public static <T> T randomFrom(List<T> list) {
return RandomPicks.randomFrom(random(), list);
}
/**
* Shortcut for {@link RandomizedContext#getRandom()}. Even though this method
* is static, it returns per-thread {@link Random} instance, so no race conditions
* can occur.
*
* <p>It is recommended that specific methods are used to pick random values.
*/
public static Random getRandom() {
return random();
}
/**
* A random integer from 0..max (inclusive).
*/
public static int randomInt(int max) {
return RandomInts.randomInt(getRandom(), max);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomAsciiOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomAsciiOfLengthBetween(getRandom(), minCodeUnits,
maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomAsciiOfLength(int codeUnits) {
return RandomStrings.randomAsciiOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomUnicodeOfLengthBetween(getRandom(),
minCodeUnits, maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomUnicodeOfLength(int codeUnits) {
return RandomStrings.randomUnicodeOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
return RandomStrings.randomUnicodeOfCodepointLengthBetween(getRandom(),
minCodePoints, maxCodePoints);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomUnicodeOfCodepointLength(int codePoints) {
return RandomStrings
.randomUnicodeOfCodepointLength(getRandom(), codePoints);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomRealisticUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomRealisticUnicodeOfLengthBetween(getRandom(),
minCodeUnits, maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomRealisticUnicodeOfLength(int codeUnits) {
return RandomStrings.randomRealisticUnicodeOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomRealisticUnicodeOfCodepointLengthBetween(
int minCodePoints, int maxCodePoints) {
return RandomStrings.randomRealisticUnicodeOfCodepointLengthBetween(
getRandom(), minCodePoints, maxCodePoints);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomRealisticUnicodeOfCodepointLength(int codePoints) {
return RandomStrings.randomRealisticUnicodeOfCodepointLength(getRandom(),
codePoints);
}
/**
* Return a random TimeZone from the available timezones on the system.
*
* <p>Warning: This test assumes the returned array of time zones is repeatable from jvm execution
* to jvm execution. It _may_ be different from jvm to jvm and as such, it can render
* tests execute in a different way.</p>
*/
public static TimeZone randomTimeZone() {
final String[] availableIDs = TimeZone.getAvailableIDs();
Arrays.sort(availableIDs);
return TimeZone.getTimeZone(randomFrom(availableIDs));
}
/**
* Shortcut for {@link RandomizedContext#current()}.
*/
public static RandomizedContext getContext() {
return RandomizedContext.current();
}
/**
* Returns true if we're running nightly tests.
* @see Nightly
*/
public static boolean isNightly() {
return getContext().isNightly();
}
/**
* Returns a non-negative random value smaller or equal <code>max</code>. The value
* picked is affected by {@link #isNightly()} and {@link #multiplier()}.
*
* <p>This method is effectively an alias to:
* <pre>
* scaledRandomIntBetween(0, max)
* </pre>
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int atMost(int max) {
if (max < 0) throw new IllegalArgumentException("atMost requires non-negative argument: " + max);
return scaledRandomIntBetween(0, max);
}
/**
* Making {@link Assume#assumeTrue(boolean)} directly available.
*/
public void assumeTrue(boolean condition) {
assumeTrue("caller was too lazy to provide a reason", condition);
}
}
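The scaledRandomIntBetween helper in the class above (moved verbatim into ElasticsearchTestCase later in this diff) biases loop counts toward min on normal runs and toward max on nightly runs. A rough worked sketch under assumed values, not part of the commit, to make the scaling concrete:

// Hypothetical walk-through of scaledRandomIntBetween(0, 100); the gaussian draw
// and RANDOM_MULTIPLIER are randomized in real runs, here they are assumed fixed.
public class ScaledRandomSketch {
    public static void main(String[] args) {
        int min = 0, max = 100;
        double point = Math.min(1, 0.2) * 1;                            // assume |nextGaussian()| * 0.3 == 0.2 and RANDOM_MULTIPLIER == 1
        double range = max - min;                                       // 100
        int scaled = (int) Math.round(Math.min(point * range, range));  // 20
        System.out.println("normal run:  " + (min + scaled));           // 20 -> closer to min
        System.out.println("nightly run: " + (max - scaled));           // 80 -> closer to max
    }
}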

View File

@@ -81,7 +81,7 @@ import static org.hamcrest.Matchers.is;
*
*/
// the transportClientRatio is tricky here since we don't fully control the cluster nodes
-@ESTestCase.Backwards
+@ElasticsearchTestCase.Backwards
@ElasticsearchIntegrationTest.ClusterScope(minNumDataNodes = 0, maxNumDataNodes = 2, scope = ElasticsearchIntegrationTest.Scope.SUITE, numClientNodes = 0, transportClientRatio = 0.0)
@Ignore
public abstract class ElasticsearchBackwardsCompatIntegrationTest extends ElasticsearchIntegrationTest {

View File

@@ -226,7 +226,7 @@ import static org.hamcrest.Matchers.notNullValue;
* </p>
*/
@Ignore
-@ESTestCase.Integration
+@ElasticsearchTestCase.Integration
public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase {
/** node names of the corresponding clusters will start with these prefixes */

View File

@@ -18,13 +18,29 @@
*/
package org.elasticsearch.test;
+import com.carrotsearch.randomizedtesting.LifecycleScope;
+import com.carrotsearch.randomizedtesting.RandomizedContext;
+import com.carrotsearch.randomizedtesting.SysGlobals;
+import com.carrotsearch.randomizedtesting.annotations.Listeners;
+import com.carrotsearch.randomizedtesting.annotations.TestGroup;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
+import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.uninverting.UninvertingReader;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.TimeUnits;
+import org.apache.lucene.util.LuceneTestCase.Nightly;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.Version;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -36,32 +52,46 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsAbortPolicy;
+import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.test.cache.recycler.MockBigArrays;
import org.elasticsearch.test.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.test.junit.listeners.LoggingListener;
+import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.elasticsearch.test.search.MockSearchService;
import org.elasticsearch.test.store.MockDirectoryHelper;
import org.elasticsearch.threadpool.ThreadPool;
-import org.junit.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
import java.io.Closeable;
import java.io.IOException;
import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Formatter;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.TimeZone;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -73,7 +103,426 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS
/**
* Base testcase for randomized unit testing with Elasticsearch
*/
-public abstract class ElasticsearchTestCase extends ESTestCase {
+@Listeners({
ReproduceInfoPrinter.class,
LoggingListener.class
})
@ThreadLeakScope(Scope.SUITE)
@ThreadLeakLingering(linger = 5000) // 5 sec lingering
@TimeoutSuite(millis = 20 * TimeUnits.MINUTE)
@LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose")
@Ignore
@SuppressCodecs({"SimpleText", "Memory", "CheapBastard", "Direct"}) // slow ones
@LuceneTestCase.SuppressReproduceLine
public abstract class ElasticsearchTestCase extends LuceneTestCase {
static {
SecurityHack.ensureInitialized();
}
// setup mock filesystems for this test run. we change PathUtils
// so that all accesses are plumbed thru any mock wrappers
@BeforeClass
public static void setUpFileSystem() {
try {
Field field = PathUtils.class.getDeclaredField("DEFAULT");
field.setAccessible(true);
field.set(null, LuceneTestCase.getBaseTempDirForTestClass().getFileSystem());
} catch (ReflectiveOperationException e) {
throw new RuntimeException();
}
}
@AfterClass
public static void restoreFileSystem() {
try {
Field field1 = PathUtils.class.getDeclaredField("ACTUAL_DEFAULT");
field1.setAccessible(true);
Field field2 = PathUtils.class.getDeclaredField("DEFAULT");
field2.setAccessible(true);
field2.set(null, field1.get(null));
} catch (ReflectiveOperationException e) {
throw new RuntimeException();
}
}
@BeforeClass
public static void setUpProcessors() {
int numCpu = TestUtil.nextInt(random(), 1, 4);
System.setProperty(EsExecutors.DEFAULT_SYSPROP, Integer.toString(numCpu));
assertEquals(numCpu, EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY));
}
@AfterClass
public static void restoreProcessors() {
System.clearProperty(EsExecutors.DEFAULT_SYSPROP);
}
@Before
public void disableQueryCache() {
// TODO: Parent/child and other things does not work with the query cache
IndexSearcher.setDefaultQueryCache(null);
}
@After
public void ensureNoFieldCacheUse() {
// field cache should NEVER get loaded.
String[] entries = UninvertingReader.getUninvertedStats();
assertEquals("fieldcache must never be used, got=" + Arrays.toString(entries), 0, entries.length);
}
// old shit:
/**
* The number of concurrent JVMs used to run the tests, Default is <tt>1</tt>
*/
public static final int CHILD_JVM_COUNT = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_COUNT, "1"));
/**
* The child JVM ordinal of this JVM. Default is <tt>0</tt>
*/
public static final int CHILD_JVM_ID = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0"));
/**
* Annotation for backwards compat tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = false, sysProperty = TESTS_BACKWARDS_COMPATIBILITY)
public @interface Backwards {
}
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY = "tests.bwc";
public static final String TESTS_BACKWARDS_COMPATIBILITY_VERSION = "tests.bwc.version";
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY_PATH}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY_PATH = "tests.bwc.path";
/**
* Annotation for REST tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = TESTS_REST)
public @interface Rest {
}
/**
* Property that allows to control whether the REST tests are run (default) or not
*/
public static final String TESTS_REST = "tests.rest";
/**
* Annotation for integration tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION)
public @interface Integration {
}
// --------------------------------------------------------------------
// Test groups, system properties and other annotations modifying tests
// --------------------------------------------------------------------
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_FAILFAST = "tests.failfast";
public static final String SYSPROP_INTEGRATION = "tests.integration";
// -----------------------------------------------------------------
// Suite and test case setup/ cleanup.
// -----------------------------------------------------------------
/** MockFSDirectoryService sets this: */
public static boolean checkIndexFailed;
/**
* For subclasses to override. Overrides must call {@code super.setUp()}.
*/
@Override
public void setUp() throws Exception {
super.setUp();
checkIndexFailed = false;
}
/**
* For subclasses to override. Overrides must call {@code super.tearDown()}.
*/
@After
public void tearDown() throws Exception {
assertFalse("at least one shard failed CheckIndex", checkIndexFailed);
super.tearDown();
}
// -----------------------------------------------------------------
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------
/**
* Registers a {@link Closeable} resource that should be closed after the test
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
@Override
public <T extends Closeable> T closeAfterTest(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.TEST);
}
/**
* Registers a {@link Closeable} resource that should be closed after the suite
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
public static <T extends Closeable> T closeAfterSuite(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.SUITE);
}
// old helper stuff, a lot of it is bad news and we should see if its all used
/**
* Returns a "scaled" random number between min and max (inclusive). The number of
* iterations will fall between [min, max], but the selection will also try to
* achieve the points below:
* <ul>
* <li>the multiplier can be used to move the number of iterations closer to min
* (if it is smaller than 1) or closer to max (if it is larger than 1). Setting
* the multiplier to 0 will always result in picking min.</li>
* <li>on normal runs, the number will be closer to min than to max.</li>
* <li>on nightly runs, the number will be closer to max than to min.</li>
* </ul>
*
* @see #multiplier()
*
* @param min Minimum (inclusive).
* @param max Maximum (inclusive).
* @return Returns a random number between min and max.
*/
public static int scaledRandomIntBetween(int min, int max) {
if (min < 0) throw new IllegalArgumentException("min must be >= 0: " + min);
if (min > max) throw new IllegalArgumentException("max must be >= min: " + min + ", " + max);
double point = Math.min(1, Math.abs(random().nextGaussian()) * 0.3) * RANDOM_MULTIPLIER;
double range = max - min;
int scaled = (int) Math.round(Math.min(point * range, range));
if (isNightly()) {
return max - scaled;
} else {
return min + scaled;
}
}
/**
* A random integer from <code>min</code> to <code>max</code> (inclusive).
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int randomIntBetween(int min, int max) {
return RandomInts.randomIntBetween(random(), min, max);
}
/**
* Returns a "scaled" number of iterations for loops which can have a variable
* iteration count. This method is effectively
* an alias to {@link #scaledRandomIntBetween(int, int)}.
*/
public static int iterations(int min, int max) {
return scaledRandomIntBetween(min, max);
}
/**
* An alias for {@link #randomIntBetween(int, int)}.
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int between(int min, int max) {
return randomIntBetween(min, max);
}
/**
* The exact opposite of {@link #rarely()}.
*/
public static boolean frequently() {
return !rarely();
}
public static boolean randomBoolean() {
return random().nextBoolean();
}
public static byte randomByte() { return (byte) getRandom().nextInt(); }
public static short randomShort() { return (short) getRandom().nextInt(); }
public static int randomInt() { return getRandom().nextInt(); }
public static float randomFloat() { return getRandom().nextFloat(); }
public static double randomDouble() { return getRandom().nextDouble(); }
public static long randomLong() { return getRandom().nextLong(); }
/**
* Making {@link Assume#assumeNotNull(Object...)} directly available.
*/
public static void assumeNotNull(Object... objects) {
Assume.assumeNotNull(objects);
}
/**
* Pick a random object from the given array. The array must not be empty.
*/
public static <T> T randomFrom(T... array) {
return RandomPicks.randomFrom(random(), array);
}
/**
* Pick a random object from the given list.
*/
public static <T> T randomFrom(List<T> list) {
return RandomPicks.randomFrom(random(), list);
}
/**
* Shortcut for {@link RandomizedContext#getRandom()}. Even though this method
* is static, it returns per-thread {@link Random} instance, so no race conditions
* can occur.
*
* <p>It is recommended that specific methods are used to pick random values.
*/
public static Random getRandom() {
return random();
}
/**
* A random integer from 0..max (inclusive).
*/
public static int randomInt(int max) {
return RandomInts.randomInt(getRandom(), max);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomAsciiOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomAsciiOfLengthBetween(getRandom(), minCodeUnits,
maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomAsciiOfLength(int codeUnits) {
return RandomStrings.randomAsciiOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomUnicodeOfLengthBetween(getRandom(),
minCodeUnits, maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomUnicodeOfLength(int codeUnits) {
return RandomStrings.randomUnicodeOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomUnicodeOfCodepointLengthBetween(int minCodePoints, int maxCodePoints) {
return RandomStrings.randomUnicodeOfCodepointLengthBetween(getRandom(),
minCodePoints, maxCodePoints);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomUnicodeOfCodepointLength(int codePoints) {
return RandomStrings
.randomUnicodeOfCodepointLength(getRandom(), codePoints);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomRealisticUnicodeOfLengthBetween(int minCodeUnits, int maxCodeUnits) {
return RandomStrings.randomRealisticUnicodeOfLengthBetween(getRandom(),
minCodeUnits, maxCodeUnits);
}
/** @see StringGenerator#ofCodeUnitsLength(Random, int, int) */
public static String randomRealisticUnicodeOfLength(int codeUnits) {
return RandomStrings.randomRealisticUnicodeOfLength(getRandom(), codeUnits);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomRealisticUnicodeOfCodepointLengthBetween(
int minCodePoints, int maxCodePoints) {
return RandomStrings.randomRealisticUnicodeOfCodepointLengthBetween(
getRandom(), minCodePoints, maxCodePoints);
}
/** @see StringGenerator#ofCodePointsLength(Random, int, int) */
public static String randomRealisticUnicodeOfCodepointLength(int codePoints) {
return RandomStrings.randomRealisticUnicodeOfCodepointLength(getRandom(),
codePoints);
}
/**
* Return a random TimeZone from the available timezones on the system.
*
* <p>Warning: This test assumes the returned array of time zones is repeatable from jvm execution
* to jvm execution. It _may_ be different from jvm to jvm and as such, it can render
* tests execute in a different way.</p>
*/
public static TimeZone randomTimeZone() {
final String[] availableIDs = TimeZone.getAvailableIDs();
Arrays.sort(availableIDs);
return TimeZone.getTimeZone(randomFrom(availableIDs));
}
/**
* Shortcut for {@link RandomizedContext#current()}.
*/
public static RandomizedContext getContext() {
return RandomizedContext.current();
}
/**
* Returns true if we're running nightly tests.
* @see Nightly
*/
public static boolean isNightly() {
return getContext().isNightly();
}
/**
* Returns a non-negative random value smaller or equal <code>max</code>. The value
* picked is affected by {@link #isNightly()} and {@link #multiplier()}.
*
* <p>This method is effectively an alias to:
* <pre>
* scaledRandomIntBetween(0, max)
* </pre>
*
* @see #scaledRandomIntBetween(int, int)
*/
public static int atMost(int max) {
if (max < 0) throw new IllegalArgumentException("atMost requires non-negative argument: " + max);
return scaledRandomIntBetween(0, max);
}
/**
* Making {@link Assume#assumeTrue(boolean)} directly available.
*/
public void assumeTrue(boolean condition) {
assumeTrue("caller was too lazy to provide a reason", condition);
}
private static Thread.UncaughtExceptionHandler defaultHandler;

View File

@@ -25,7 +25,7 @@ import com.carrotsearch.randomizedtesting.TraceFormatting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.InternalTestCluster;
import org.junit.internal.AssumptionViolatedException;

View File

@@ -32,7 +32,7 @@ import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.rest.client.RestException;
@@ -58,7 +58,7 @@ import java.util.*;
//tests distribution disabled for now since it causes reporting problems,
// due to the non unique suite name
//@ReplicateOnEachVm
-@ESTestCase.Rest
+@ElasticsearchTestCase.Rest
@ClusterScope(randomDynamicTemplates = false)
@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // timeout the suite after 40min and fail the test.
public class ElasticsearchRestTests extends ElasticsearchIntegrationTest {

View File

@@ -44,7 +44,7 @@ import org.elasticsearch.index.store.distributor.Distributor;
import org.elasticsearch.index.store.fs.FsDirectoryService;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.io.IOException;
@@ -131,7 +131,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
return;
}
if (IndexWriter.isLocked(dir)) {
-ESTestCase.checkIndexFailed = true;
+ElasticsearchTestCase.checkIndexFailed = true;
throw new IllegalStateException("IndexWriter is still open on shard " + shardId);
}
try (CheckIndex checkIndex = new CheckIndex(dir)) {
@@ -141,7 +141,7 @@ public class MockFSDirectoryService extends FsDirectoryService {
out.flush();
CheckIndex.Status status = checkIndex.checkIndex();
if (!status.clean) {
-ESTestCase.checkIndexFailed = true;
+ElasticsearchTestCase.checkIndexFailed = true;
logger.warn("check index [failure] index files={}\n{}",
Arrays.toString(dir.listAll()),
new String(os.bytes().toBytes(), Charsets.UTF_8));