Merge pull request #10656 from elastic/mockfilesystem

Integrate better with lucene test framework and mockfilesystems
Robert Muir 2015-04-20 08:14:06 -04:00
commit db096b4404
225 changed files with 1848 additions and 2026 deletions

View File

@ -260,7 +260,7 @@ The REST tests are run automatically when executing the maven test command. To r
REST tests use the following command:
---------------------------------------------------------------------------
mvn test -Dtests.class=org.elasticsearch.test.rest.ElasticsearchRestTests
mvn test -Dtests.filter="@Rest"
---------------------------------------------------------------------------
`ElasticsearchRestTests` is the executable test class that runs all the

View File

@ -33,3 +33,6 @@ java.nio.file.Path#toFile()
@defaultMessage Don't use deprecated lucene apis
org.apache.lucene.index.DocsEnum
org.apache.lucene.index.DocsAndPositionsEnum
java.nio.file.Paths @ Use PathUtils.get instead.
java.nio.file.FileSystems#getDefault() @ use PathUtils.getDefault instead.
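The two core signatures added above steer callers away from the JDK default filesystem; a minimal sketch of what that migration looks like at a call site (the class and method names here are illustrative, not from this commit):
---------------------------------------------------------------------------
import org.elasticsearch.common.io.PathUtils;

import java.nio.file.Path;

public class PidFileExample {
    static Path resolvePidFile(String pidFile) {
        // before: Paths.get(pidFile) -- always bound to the real JDK default filesystem
        // after: PathUtils.get(pidFile) -- goes through the default that tests can swap out
        return PathUtils.get(pidFile).toAbsolutePath();
    }
}
---------------------------------------------------------------------------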

View File

@ -16,3 +16,5 @@
com.carrotsearch.randomizedtesting.RandomizedTest#globalTempDir() @ Use newTempDirPath() instead
com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded seeds
org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead
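The test signatures above encode test-code idioms rather than production APIs; a hedged sketch of a suite that follows them, assuming newTempDirPath() is exposed by the ElasticsearchTestCase base class:
---------------------------------------------------------------------------
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;

import java.nio.file.Files;
import java.nio.file.Path;

@SuppressCodecs("*") // pin codecs per suite instead of calling Codec.setDefault()
public class TempDirConventionTests extends ElasticsearchTestCase {

    @Test
    public void testWritesIntoManagedTempDir() throws Exception {
        Path dir = newTempDirPath(); // replaces RandomizedTest#globalTempDir()
        Files.createDirectories(dir.resolve("some-subdir"));
        // no hardcoded @Seed on the class: reproduce failures with -Dtests.seed instead
    }
}
---------------------------------------------------------------------------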

pom.xml
View File

@ -32,7 +32,8 @@
<properties>
<lucene.version>5.2.0</lucene.version>
<lucene.maven.version>5.2.0-snapshot-1674183</lucene.maven.version>
<lucene.maven.version>5.2.0-snapshot-1674576</lucene.maven.version>
<testframework.version>2.1.14</testframework.version>
<tests.jvms>auto</tests.jvms>
<tests.shuffle>true</tests.shuffle>
<tests.output>onerror</tests.output>
@ -40,7 +41,8 @@
<tests.bwc.path>${project.basedir}/backwards</tests.bwc.path>
<tests.locale>random</tests.locale>
<tests.timezone>random</tests.timezone>
<es.logger.level>INFO</es.logger.level>
<tests.slow>false</tests.slow>
<es.logger.level>ERROR</es.logger.level>
<tests.heap.size>512m</tests.heap.size>
<tests.heapdump.path>${basedir}/logs/</tests.heapdump.path>
<tests.topn>5</tests.topn>
@ -66,7 +68,7 @@
<repository>
<id>lucene-snapshots</id>
<name>Lucene Snapshots</name>
<url>https://download.elastic.co/lucenesnapshots/1674183</url>
<url>https://download.elastic.co/lucenesnapshots/1674576</url>
</repository>
</repositories>
@ -80,7 +82,7 @@
<dependency>
<groupId>com.carrotsearch.randomizedtesting</groupId>
<artifactId>randomizedtesting-runner</artifactId>
<version>2.1.13</version>
<version>${testframework.version}</version>
<scope>test</scope>
</dependency>
<dependency>
@ -501,7 +503,7 @@
<plugin>
<groupId>com.carrotsearch.randomizedtesting</groupId>
<artifactId>junit4-maven-plugin</artifactId>
<version>2.1.13</version>
<version>${testframework.version}</version>
<executions>
<execution>
<id>tests</id>
@ -510,9 +512,10 @@
<goal>junit4</goal>
</goals>
<configuration>
<heartbeat>20</heartbeat>
<heartbeat>10</heartbeat>
<jvmOutputAction>pipe,warn</jvmOutputAction>
<leaveTemporary>true</leaveTemporary>
<ifNoTests>fail</ifNoTests>
<listeners>
<report-ant-xml mavenExtensions="true"
dir="${project.build.directory}/surefire-reports"/>
@ -525,7 +528,19 @@
showStatusFailure="true"
showStatusIgnored="true"
showSuiteSummary="true"
timestamps="false"/>
timestamps="false">
<filtertrace>
<!-- custom filters: we carefully only omit test infra noise here -->
<containsstring contains=".SlaveMain." />
<containsregex pattern="^(\s+at )(org\.junit\.)" />
<!-- also includes anonymous classes inside these two: -->
<containsregex pattern="^(\s+at )(com\.carrotsearch\.randomizedtesting.RandomizedRunner)" />
<containsregex pattern="^(\s+at )(com\.carrotsearch\.randomizedtesting.ThreadLeakControl)" />
<containsregex pattern="^(\s+at )(com\.carrotsearch\.randomizedtesting.rules\.)" />
<containsregex pattern="^(\s+at )(org\.apache\.lucene.util\.TestRule)" />
<containsregex pattern="^(\s+at )(org\.apache\.lucene.util\.AbstractBeforeAfterRule)" />
</filtertrace>
</report-text>
<report-execution-times historyLength="20" file="${basedir}/${execution.hint.file}"/>
</listeners>
<assertions>
@ -561,7 +576,8 @@
<sysouts>${tests.verbose}</sysouts>
<seed>${tests.seed}</seed>
<haltOnFailure>${tests.failfast}</haltOnFailure>
<uniqueSuiteNames>false</uniqueSuiteNames>
<!-- enforce unique suite names, or reporting stuff can be screwed up -->
<uniqueSuiteNames>true</uniqueSuiteNames>
<systemProperties>
<!-- we use './temp' since this is per JVM and tests are forbidden from writing to CWD -->
<java.io.tmpdir>./temp</java.io.tmpdir>
@ -570,7 +586,6 @@
<tests.bwc.path>${tests.bwc.path}</tests.bwc.path>
<tests.bwc.version>${tests.bwc.version}</tests.bwc.version>
<tests.jvm.argline>${tests.jvm.argline}</tests.jvm.argline>
<tests.processors>${tests.processors}</tests.processors>
<tests.appendseed>${tests.appendseed}</tests.appendseed>
<tests.iters>${tests.iters}</tests.iters>
<tests.maxfailures>${tests.maxfailures}</tests.maxfailures>
@ -1626,6 +1641,7 @@
<version>2.9</version>
<configuration>
<buildOutputDirectory>eclipse-build</buildOutputDirectory>
<downloadSources>true</downloadSources>
</configuration>
</plugin>
</plugins>

View File

@ -65,7 +65,7 @@ skipped, and the reason why the tests are skipped. For instance:
....
"Parent":
- skip:
version: "0 - 0.90.2"
version: "0.20.1 - 0.90.2"
reason: Delete ignores the parent param
- do:
@ -75,14 +75,17 @@ skipped, and the reason why the tests are skipped. For instance:
All tests in the file following the skip statement should be skipped if:
`min <= current <= max`.
The `version` range should always have an upper bound. Versions should
either have each version part compared numerically, or should be converted
to a string with sufficient digits to allow string comparison, eg
The `version` range can leave either bound empty, which means "open ended".
For instance:
....
"Parent":
- skip:
version: "1.0.0.Beta1 - "
reason: Delete ignores the parent param
0.90.2 -> 000-090-002
Snapshot versions and versions of the form `1.0.0.Beta1` can be treated
as the rounded down version, eg `1.0.0`.
- do:
... test definitions ...
....
The skip section can also be used to list new features that need to be
supported in order to run a test. This way the up-to-date runners will

View File

@ -1,7 +1,7 @@
---
setup:
- skip:
version: 0 - 999
version: " - "
reason: leaves transient metadata behind, need to fix it
---
"Test put settings":

View File

@ -166,7 +166,7 @@ setup:
"Should return test_index_3 if expand_wildcards=closed":
- skip:
version: "0 - 2.0.0"
version: " - 2.0.0"
reason: Requires fix for issue 7258
- do:

View File

@ -202,7 +202,7 @@ setup:
"Getting alias on an non-existent index should return 404":
- skip:
version: 1 - 999
version: "1.0.0.Beta1 - "
reason: not implemented yet
- do:
catch: missing

View File

@ -81,7 +81,7 @@ setup:
---
"put settings in list of indices":
- skip:
version: 1 - 999
version: " - "
reason: list of indices not implemented yet
- do:
indices.put_settings:

View File

@ -2,7 +2,7 @@
"Metadata Fields":
- skip:
version: "0 - 999"
version: " - "
reason: "Update doesn't return metadata fields, waiting for #3259"
- do:

View File

@ -485,7 +485,7 @@ public class Version {
}
String[] parts = version.split("\\.");
if (parts.length < 3 || parts.length > 4) {
throw new IllegalArgumentException("the version needs to contain major, minor and revision, and optionally the build");
throw new IllegalArgumentException("the version needs to contain major, minor and revision, and optionally the build: " + version);
}
try {

View File

@ -104,9 +104,14 @@ final class TermVectorsWriter {
if (flags.contains(Flag.TermStatistics)) {
// get the doc frequency
if (dfs != null) {
writeTermStatistics(dfs.termStatistics().get(term));
final TermStatistics statistics = dfs.termStatistics().get(term);
writeTermStatistics(statistics == null ? new TermStatistics(termBytesRef, 0, 0) : statistics);
} else {
writeTermStatistics(topLevelIterator);
if (foundTerm) {
writeTermStatistics(topLevelIterator);
} else {
writeTermStatistics(new TermStatistics(termBytesRef, 0, 0));
}
}
}
if (useDocsAndPos) {

View File

@ -26,6 +26,8 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.CreationException;
import org.elasticsearch.common.inject.spi.Message;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.jna.Natives;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
@ -153,7 +155,7 @@ public class Bootstrap {
if (pidFile != null) {
try {
PidFile.create(Paths.get(pidFile), true);
PidFile.create(PathUtils.get(pidFile), true);
} catch (Exception e) {
String errorMessage = buildErrorMessage("pid", e);
sysError(errorMessage, true);

View File

@ -208,7 +208,7 @@ public final class FileSystemUtils {
} else if (suffix != null) {
if (!isSameFile(file, path)) {
// If it already exists we try to copy this new version appending suffix to its name
path = Paths.get(path.toString().concat(suffix));
path = path.resolveSibling(path.getFileName().toString().concat(suffix));
// We just move the file to new dir but with a new name (appended with suffix)
Files.move(file, path, StandardCopyOption.REPLACE_EXISTING);
}
@ -258,6 +258,8 @@ public final class FileSystemUtils {
Files.walkFileTree(source, new TreeCopier(source, destination, true));
}
}
// TODO: note that this will fail if source and target are on different NIO.2 filesystems.
static class TreeCopier extends SimpleFileVisitor<Path> {
private final Path source;
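The switch from Paths.get(path.toString().concat(suffix)) to resolveSibling() is what keeps this code working under a mock filesystem; an illustrative sketch (not part of the diff) of the pattern:
---------------------------------------------------------------------------
import java.nio.file.Path;

public class SuffixedPathExample {
    // Deriving the new name from 'path' itself keeps the result on the same
    // FileSystem/provider as 'path'; building it via Paths.get(path.toString() + suffix)
    // would always land on the real JDK default filesystem.
    static Path withSuffix(Path path, String suffix) {
        return path.resolveSibling(path.getFileName().toString().concat(suffix));
    }
}
---------------------------------------------------------------------------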

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.io;
import org.elasticsearch.common.SuppressForbidden;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
* Utilities for creating a Path from names,
* or accessing the default FileSystem.
* <p>
* This class allows the default filesystem to
* be changed during tests.
*/
@SuppressForbidden(reason = "accesses the default filesystem by design")
public final class PathUtils {
/** no instantiation */
private PathUtils() {}
/** the actual JDK default */
static final FileSystem ACTUAL_DEFAULT = FileSystems.getDefault();
/** can be changed by tests (via reflection) */
private static volatile FileSystem DEFAULT = ACTUAL_DEFAULT;
/**
* Returns a {@code Path} from name components.
* <p>
* This works just like {@code Paths.get()}.
* Remember: just like {@code Paths.get()} this is NOT A STRING CONCATENATION
* UTILITY FUNCTION.
* <p>
* Remember: this should almost never be used. Usually resolve
* a path against an existing one!
*/
public static Path get(String first, String... more) {
return DEFAULT.getPath(first, more);
}
/**
* Returns a {@code Path} from a URI
* <p>
* This works just like {@code Paths.get()}.
* <p>
* Remember: this should almost never be used. Usually resolve
* a path against an existing one!
*/
public static Path get(URI uri) {
if (uri.getScheme().equalsIgnoreCase("file")) {
return DEFAULT.provider().getPath(uri);
} else {
return Paths.get(uri);
}
}
/**
* Returns the default FileSystem.
*/
public static FileSystem getDefaultFileSystem() {
return DEFAULT;
}
}
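A hedged usage sketch for the class above (the concrete paths are made up): convert strings to a Path once at the entry point, then resolve everything else against that existing Path, as the javadoc recommends:
---------------------------------------------------------------------------
import org.elasticsearch.common.io.PathUtils;

import java.nio.file.Path;

public class PathUtilsUsage {
    public static void main(String[] args) {
        // entry point: the one string-to-Path conversion
        Path home = PathUtils.get("/var/lib/elasticsearch");

        // everything downstream resolves against the existing Path, staying on
        // whatever FileSystem 'home' belongs to (real or test-provided)
        Path plugins = home.resolve("plugins");
        Path config = home.resolve("config").resolve("elasticsearch.yml");

        System.out.println(plugins + " " + config);
    }
}
---------------------------------------------------------------------------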

View File

@ -36,6 +36,9 @@ public class EsExecutors {
*/
public static final String PROCESSORS = "processors";
/** Useful for testing */
public static final String DEFAULT_SYSPROP = "es.processors.override";
/**
* Returns the number of processors available but at most <tt>32</tt>.
*/
@ -44,7 +47,11 @@ public class EsExecutors {
* ie. >= 48 create too many threads and run into OOM see #3478
* We just use a 32 core upper-bound here to not stress the system
* too much with too many created threads */
return settings.getAsInt(PROCESSORS, Math.min(32, Runtime.getRuntime().availableProcessors()));
int defaultValue = Math.min(32, Runtime.getRuntime().availableProcessors());
try {
defaultValue = Integer.parseInt(System.getProperty(DEFAULT_SYSPROP));
} catch (Throwable ignored) {}
return settings.getAsInt(PROCESSORS, defaultValue);
}
public static PrioritizedEsThreadPoolExecutor newSinglePrioritizing(ThreadFactory threadFactory) {
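A minimal sketch, assuming the es.processors.override property is read as above: a test JVM can pin the processor count so thread pool sizing stays reproducible across machines (boundedNumberOfProcessors and ImmutableSettings.EMPTY are existing names in this code base):
---------------------------------------------------------------------------
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.EsExecutors;

public class ProcessorsOverrideExample {
    public static void main(String[] args) {
        // could equally be passed as -Des.processors.override=2 on the JVM command line
        System.setProperty(EsExecutors.DEFAULT_SYSPROP, "2");
        int processors = EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY);
        System.out.println("bounded processors: " + processors); // 2, regardless of the machine
    }
}
---------------------------------------------------------------------------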

View File

@ -20,6 +20,8 @@
package org.elasticsearch.env;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
@ -68,25 +70,25 @@ public class Environment {
public Environment(Settings settings) {
this.settings = settings;
if (settings.get("path.home") != null) {
homeFile = Paths.get(cleanPath(settings.get("path.home")));
homeFile = PathUtils.get(cleanPath(settings.get("path.home")));
} else {
homeFile = Paths.get(System.getProperty("user.dir"));
homeFile = PathUtils.get(System.getProperty("user.dir"));
}
if (settings.get("path.conf") != null) {
configFile = Paths.get(cleanPath(settings.get("path.conf")));
configFile = PathUtils.get(cleanPath(settings.get("path.conf")));
} else {
configFile = homeFile.resolve("config");
}
if (settings.get("path.plugins") != null) {
pluginsFile = Paths.get(cleanPath(settings.get("path.plugins")));
pluginsFile = PathUtils.get(cleanPath(settings.get("path.plugins")));
} else {
pluginsFile = homeFile.resolve("plugins");
}
if (settings.get("path.work") != null) {
workFile = Paths.get(cleanPath(settings.get("path.work")));
workFile = PathUtils.get(cleanPath(settings.get("path.work")));
} else {
workFile = homeFile.resolve("work");
}
@ -97,7 +99,7 @@ public class Environment {
dataFiles = new Path[dataPaths.length];
dataWithClusterFiles = new Path[dataPaths.length];
for (int i = 0; i < dataPaths.length; i++) {
dataFiles[i] = Paths.get(dataPaths[i]);
dataFiles[i] = PathUtils.get(dataPaths[i]);
dataWithClusterFiles[i] = dataFiles[i].resolve(ClusterName.clusterNameFromSettings(settings).value());
}
} else {
@ -106,7 +108,7 @@ public class Environment {
}
if (settings.get("path.logs") != null) {
logsFile = Paths.get(cleanPath(settings.get("path.logs")));
logsFile = PathUtils.get(cleanPath(settings.get("path.logs")));
} else {
logsFile = homeFile.resolve("logs");
}
@ -178,7 +180,7 @@ public class Environment {
public URL resolveConfig(String path) throws FailedToResolveConfigException {
String origPath = path;
// first, try it as a path on the file system
Path f1 = Paths.get(path);
Path f1 = PathUtils.get(path);
if (Files.exists(f1)) {
try {
return f1.toUri().toURL();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.env;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import org.apache.lucene.store.*;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
@ -33,6 +34,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -128,7 +130,8 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
int maxLocalStorageNodes = settings.getAsInt("node.max_local_storage_nodes", 50);
for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(Paths.get(NODES_FOLDER, Integer.toString(possibleLockId)));
// TODO: wtf with resolve(get())
Path dir = environment.dataWithClusterFiles()[dirIndex].resolve(PathUtils.get(NODES_FOLDER, Integer.toString(possibleLockId)));
Files.createDirectories(dir);
try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) {
@ -616,7 +619,8 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
final NodePath[] nodePaths = nodePaths();
final Path[] shardLocations = new Path[nodePaths.length];
for (int i = 0; i < nodePaths.length; i++) {
shardLocations[i] = nodePaths[i].path.resolve(Paths.get(INDICES_FOLDER,
// TODO: wtf with resolve(get())
shardLocations[i] = nodePaths[i].path.resolve(PathUtils.get(INDICES_FOLDER,
shardId.index().name(),
Integer.toString(shardId.id())));
}
@ -730,9 +734,9 @@ public class NodeEnvironment extends AbstractComponent implements Closeable {
// This assert is because this should be caught by MetaDataCreateIndexService
assert customPathsEnabled;
if (addNodeId) {
return Paths.get(customDataDir, Integer.toString(this.localNodeId));
return PathUtils.get(customDataDir, Integer.toString(this.localNodeId));
} else {
return Paths.get(customDataDir);
return PathUtils.get(customDataDir);
}
} else {
throw new ElasticsearchIllegalArgumentException("no custom " + IndexMetaData.SETTING_DATA_PATH + " setting available");

View File

@ -25,6 +25,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.service.NodeService;
@ -175,7 +176,7 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
// Convert file separators.
sitePath = sitePath.replace("/", separator);
// this is a plugin provided site, serve it as static files from the plugin location
Path file = FileSystemUtils.append(siteFile, Paths.get(sitePath), 0);
Path file = FileSystemUtils.append(siteFile, PathUtils.get(sitePath), 0);
if (!Files.exists(file) || Files.isHidden(file)) {
channel.sendResponse(new BytesRestResponse(NOT_FOUND));
return;

View File

@ -50,6 +50,8 @@ public class CodecService extends AbstractIndexComponent {
public final static String DEFAULT_CODEC = "default";
public final static String BEST_COMPRESSION_CODEC = "best_compression";
/** the raw unfiltered lucene default. useful for testing */
public final static String LUCENE_DEFAULT_CODEC = "lucene_default";
public CodecService(Index index) {
this(index, ImmutableSettings.Builder.EMPTY_SETTINGS);
@ -73,6 +75,7 @@ public class CodecService extends AbstractIndexComponent {
codecs.put(BEST_COMPRESSION_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger));
}
codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault());
for (String codec : Codec.availableCodecs()) {
codecs.put(codec, Codec.forName(codec));
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.engine;
import com.google.common.collect.Lists;
import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
import org.apache.lucene.search.*;
@ -27,6 +28,7 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.routing.DjbHashFunction;
import org.elasticsearch.common.Nullable;
@ -999,7 +1001,12 @@ public class InternalEngine extends Engine {
iwc.setCommitOnClose(false); // we by default don't commit on close
iwc.setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND);
iwc.setIndexDeletionPolicy(deletionPolicy);
iwc.setInfoStream(new LoggerInfoStream(logger));
// with tests.verbose, lucene sets this up: plumb to align with filesystem stream
boolean verbose = false;
try {
verbose = Boolean.parseBoolean(System.getProperty("tests.verbose"));
} catch (Throwable ignore) {}
iwc.setInfoStream(verbose ? InfoStream.getDefault() : new LoggerInfoStream(logger));
iwc.setMergeScheduler(mergeScheduler.newMergeScheduler());
MergePolicy mergePolicy = mergePolicyProvider.getMergePolicy();
// Give us the opportunity to upgrade old segments while performing

View File

@ -45,6 +45,7 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.NotFilter;
@ -180,7 +181,7 @@ public class MapperService extends AbstractIndexComponent {
} catch (FailedToResolveConfigException e) {
// not there, default to the built in one
try {
percolatorMappingUrl = Paths.get(percolatorMappingLocation).toUri().toURL();
percolatorMappingUrl = PathUtils.get(percolatorMappingLocation).toUri().toURL();
} catch (MalformedURLException e1) {
throw new FailedToResolveConfigException("Failed to resolve default percolator mapping location [" + percolatorMappingLocation + "]");
}
@ -231,7 +232,7 @@ public class MapperService extends AbstractIndexComponent {
} catch (FailedToResolveConfigException e) {
// not there, default to the built in one
try {
mappingUrl = Paths.get(mappingLocation).toUri().toURL();
mappingUrl = PathUtils.get(mappingLocation).toUri().toURL();
} catch (MalformedURLException e1) {
throw new FailedToResolveConfigException("Failed to resolve dynamic mapping location [" + mappingLocation + "]");
}

View File

@ -22,11 +22,13 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.UncheckedExecutionException;
import org.apache.lucene.analysis.hunspell.Dictionary;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@ -116,7 +118,7 @@ public class HunspellService extends AbstractComponent {
private Path resolveHunspellDirectory(Settings settings, Environment env) {
String location = settings.get(HUNSPELL_LOCATION, null);
if (location != null) {
return Paths.get(location);
return PathUtils.get(location);
}
return env.configFile().resolve("hunspell");
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.plugins;
import com.google.common.base.Charsets;
import com.google.common.collect.*;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
@ -35,6 +36,7 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -61,7 +63,7 @@ public class PluginsService extends AbstractComponent {
public static final String ES_PLUGIN_PROPERTIES = "es-plugin.properties";
public static final String LOAD_PLUGIN_FROM_CLASSPATH = "plugins.load_classpath_plugins";
private static final PathMatcher PLUGIN_LIB_MATCHER = FileSystems.getDefault().getPathMatcher("glob:**.{jar,zip}");
static final String PLUGIN_LIB_PATTERN = "glob:**.{jar,zip}";
public static final String PLUGINS_CHECK_LUCENE_KEY = "plugins.check_lucene";
public static final String PLUGINS_INFO_REFRESH_INTERVAL_KEY = "plugins.info_refresh_interval";
@ -393,9 +395,11 @@ public class PluginsService extends AbstractComponent {
libFiles.addAll(Arrays.asList(files(libLocation)));
}
PathMatcher matcher = PathUtils.getDefaultFileSystem().getPathMatcher(PLUGIN_LIB_PATTERN);
// if there are jars in it, add it as well
for (Path libFile : libFiles) {
if (!hasLibExtension(libFile)) {
if (!matcher.matches(libFile)) {
continue;
}
addURL.invoke(classLoader, libFile.toUri().toURL());
@ -407,10 +411,6 @@ public class PluginsService extends AbstractComponent {
}
}
protected static boolean hasLibExtension(Path lib) {
return PLUGIN_LIB_MATCHER.matches(lib);
}
private Path[] files(Path from) throws IOException {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(from)) {
return Iterators.toArray(stream.iterator(), Path.class);
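The glob matcher is now built from PathUtils.getDefaultFileSystem() at use time instead of being cached from FileSystems.getDefault(); an illustrative sketch of the same lookup (the plugin path is made up):
---------------------------------------------------------------------------
import org.elasticsearch.common.io.PathUtils;

import java.nio.file.Path;
import java.nio.file.PathMatcher;

public class PluginLibMatcherExample {
    public static void main(String[] args) {
        // the matcher comes from whichever filesystem is currently the default,
        // so jar/zip matching also works when tests swap in a mock filesystem
        PathMatcher matcher = PathUtils.getDefaultFileSystem()
                .getPathMatcher("glob:**.{jar,zip}");
        Path candidate = PathUtils.get("plugins", "example", "lib", "example-plugin.jar");
        System.out.println(matcher.matches(candidate)); // prints true for *.jar / *.zip
    }
}
---------------------------------------------------------------------------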

View File

@ -23,6 +23,8 @@ import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.fs.FsBlobStore;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.snapshots.IndexShardRepository;
import org.elasticsearch.repositories.RepositoryException;
@ -74,7 +76,7 @@ public class FsRepository extends BlobStoreRepository {
logger.warn("using local fs location for gateway, should be changed to be a shared location across nodes");
throw new RepositoryException(name.name(), "missing location");
} else {
locationFile = Paths.get(location);
locationFile = PathUtils.get(location);
}
blobStore = new FsBlobStore(settings, locationFile);
this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.fs.chunk_size", null));

View File

@ -41,7 +41,7 @@ import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
public class BlendedTermQueryTest extends ElasticsearchLuceneTestCase {
public class BlendedTermQueryTest extends ElasticsearchTestCase {
@Test
public void testBooleanQuery() throws IOException {

View File

@ -28,10 +28,9 @@ import org.apache.lucene.search.*;
import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.search.highlight.HighlightUtils;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@ -40,8 +39,7 @@ import java.util.*;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
@LuceneTestCase.SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene3x"})
public class CustomPostingsHighlighterTests extends ElasticsearchLuceneTestCase {
public class CustomPostingsHighlighterTests extends ElasticsearchTestCase {
@Test
public void testDiscreteHighlightingPerValue() throws Exception {

View File

@ -27,7 +27,8 @@ import org.apache.lucene.search.*;
import org.apache.lucene.search.highlight.DefaultEncoder;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.BufferedReader;
@ -40,8 +41,7 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.*;
@LuceneTestCase.SuppressCodecs({"MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene3x"})
public class XPostingsHighlighterTests extends ElasticsearchLuceneTestCase {
public class XPostingsHighlighterTests extends ElasticsearchTestCase {
/*
Tests changes needed to make possible to perform discrete highlighting.

View File

@ -1,448 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.util;
import com.carrotsearch.randomizedtesting.*;
import com.carrotsearch.randomizedtesting.annotations.Listeners;
import com.carrotsearch.randomizedtesting.annotations.TestGroup;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import com.carrotsearch.randomizedtesting.rules.NoClassHooksShadowingRule;
import com.carrotsearch.randomizedtesting.rules.NoInstanceHooksOverridesRule;
import com.carrotsearch.randomizedtesting.rules.StaticFieldsInvariantRule;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.test.AfterTestRule;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import java.io.Closeable;
import java.io.IOException;
import java.lang.annotation.*;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.logging.Logger;
@TestMethodProviders({
LuceneJUnit3MethodProvider.class,
JUnit4MethodProvider.class
})
@Listeners({
ReproduceInfoPrinter.class,
FailureMarker.class
})
@RunWith(value = com.carrotsearch.randomizedtesting.RandomizedRunner.class)
@SuppressCodecs(value = "Lucene3x")
// NOTE: this class is in o.a.lucene.util since it uses some classes that are related
// to the test framework that didn't make sense to copy but are package private access
public abstract class AbstractRandomizedTest extends RandomizedTest {
/**
* The number of concurrent JVMs used to run the tests, Default is <tt>1</tt>
*/
public static final int CHILD_JVM_COUNT = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_COUNT, "1"));
/**
* The child JVM ordinal of this JVM. Default is <tt>0</tt>
*/
public static final int CHILD_JVM_ID = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0"));
/**
* Annotation for backwards compat tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = false, sysProperty = TESTS_BACKWARDS_COMPATIBILITY)
public @interface Backwards {
}
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY = "tests.bwc";
public static final String TESTS_BACKWARDS_COMPATIBILITY_VERSION = "tests.bwc.version";
/**
* Key used to set the path for the elasticsearch executable used to run backwards compatibility tests from
* via the commandline -D{@value #TESTS_BACKWARDS_COMPATIBILITY_PATH}
*/
public static final String TESTS_BACKWARDS_COMPATIBILITY_PATH = "tests.bwc.path";
/**
* Annotation for REST tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = TESTS_REST)
public @interface Rest {
}
/**
* Property that allows to control whether the REST tests are run (default) or not
*/
public static final String TESTS_REST = "tests.rest";
/**
* Annotation for integration tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION)
public @interface Integration {
}
// --------------------------------------------------------------------
// Test groups, system properties and other annotations modifying tests
// --------------------------------------------------------------------
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_FAILFAST = "tests.failfast";
public static final String SYSPROP_INTEGRATION = "tests.integration";
public static final String SYSPROP_PROCESSORS = "tests.processors";
// -----------------------------------------------------------------
// Truly immutable fields and constants, initialized once and valid
// for all suites ever since.
// -----------------------------------------------------------------
/**
* Use this constant when creating Analyzers and any other version-dependent stuff.
* <p><b>NOTE:</b> Change this when development starts for new Lucene version:
*/
public static final Version TEST_VERSION_CURRENT = Lucene.VERSION;
/**
* True if and only if tests are run in verbose mode. If this flag is false
* tests are not expected to print any messages.
*/
public static final boolean VERBOSE = systemPropertyAsBoolean("tests.verbose", false);
/**
* A random multiplier which you should use when writing random tests:
* multiply it by the number of iterations to scale your tests (for nightly builds).
*/
public static final int RANDOM_MULTIPLIER = systemPropertyAsInt("tests.multiplier", 1);
/**
* TODO: javadoc?
*/
public static final String DEFAULT_LINE_DOCS_FILE = "europarl.lines.txt.gz";
/**
* the line file used by LineFileDocs
*/
public static final String TEST_LINE_DOCS_FILE = System.getProperty("tests.linedocsfile", DEFAULT_LINE_DOCS_FILE);
/**
* Create indexes in this directory, optimally use a subdir, named after the test
*/
public static final Path TEMP_DIR;
public static final int TESTS_PROCESSORS;
static {
String s = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
if (s == null)
throw new RuntimeException("To run tests, you need to define system property 'tempDir' or 'java.io.tmpdir'.");
TEMP_DIR = Paths.get(s);
try {
Files.createDirectories(TEMP_DIR);
} catch (IOException e) {
throw new RuntimeException(e);
}
String processors = System.getProperty(SYSPROP_PROCESSORS, ""); // mvn sets "" as default
if (processors == null || processors.isEmpty()) {
processors = Integer.toString(EsExecutors.boundedNumberOfProcessors(ImmutableSettings.EMPTY));
}
TESTS_PROCESSORS = Integer.parseInt(processors);
}
/**
* These property keys will be ignored in verification of altered properties.
*
* @see SystemPropertiesInvariantRule
* @see #ruleChain
* @see #classRules
*/
private static final String[] IGNORED_INVARIANT_PROPERTIES = {
"user.timezone", "java.rmi.server.randomIDs", "sun.nio.ch.bugLevel",
"solr.directoryFactory", "solr.solr.home", "solr.data.dir" // these might be set by the LuceneTestCase -- ignore
};
// -----------------------------------------------------------------
// Fields initialized in class or instance rules.
// -----------------------------------------------------------------
// -----------------------------------------------------------------
// Class level (suite) rules.
// -----------------------------------------------------------------
/**
* Stores the currently class under test.
*/
private static final TestRuleStoreClassName classNameRule;
/**
* Class environment setup rule.
*/
static final TestRuleSetupAndRestoreClassEnv classEnvRule;
/**
* Suite failure marker (any error in the test or suite scope).
*/
public final static TestRuleMarkFailure suiteFailureMarker =
new TestRuleMarkFailure();
/**
* Ignore tests after hitting a designated number of initial failures. This
* is truly a "static" global singleton since it needs to span the lifetime of all
* test classes running inside this JVM (it cannot be part of a class rule).
* <p/>
* <p>This poses some problems for the test framework's tests because these sometimes
* trigger intentional failures which add up to the global count. This field contains
* a (possibly) changing reference to {@link TestRuleIgnoreAfterMaxFailures} and we
* dispatch to its current value from the {@link #classRules} chain using {@link TestRuleDelegate}.
*/
private static final AtomicReference<TestRuleIgnoreAfterMaxFailures> ignoreAfterMaxFailuresDelegate;
private static final TestRule ignoreAfterMaxFailures;
private static final AfterTestRule.Task noOpAfterRuleTask = new AfterTestRule.Task();
static {
int maxFailures = systemPropertyAsInt(SYSPROP_MAXFAILURES, Integer.MAX_VALUE);
boolean failFast = systemPropertyAsBoolean(SYSPROP_FAILFAST, false);
if (failFast) {
if (maxFailures == Integer.MAX_VALUE) {
maxFailures = 1;
} else {
Logger.getLogger(LuceneTestCase.class.getSimpleName()).warning(
"Property '" + SYSPROP_MAXFAILURES + "'=" + maxFailures + ", 'failfast' is" +
" ignored.");
}
}
ignoreAfterMaxFailuresDelegate =
new AtomicReference<>(
new TestRuleIgnoreAfterMaxFailures(maxFailures));
ignoreAfterMaxFailures = TestRuleDelegate.of(ignoreAfterMaxFailuresDelegate);
}
/**
* Temporarily substitute the global {@link TestRuleIgnoreAfterMaxFailures}. See
* {@link #ignoreAfterMaxFailuresDelegate} for some explanation why this method
* is needed.
*/
public static TestRuleIgnoreAfterMaxFailures replaceMaxFailureRule(TestRuleIgnoreAfterMaxFailures newValue) {
return ignoreAfterMaxFailuresDelegate.getAndSet(newValue);
}
/**
* Max 10mb of static data stored in a test suite class after the suite is complete.
* Prevents static data structures leaking and causing OOMs in subsequent tests.
*/
private final static long STATIC_LEAK_THRESHOLD = 10 * 1024 * 1024;
/**
* By-name list of ignored types like loggers etc.
*/
private final static Set<String> STATIC_LEAK_IGNORED_TYPES =
Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
EnumSet.class.getName())));
private final static Set<Class<?>> TOP_LEVEL_CLASSES =
Collections.unmodifiableSet(new HashSet<Class<?>>(Arrays.asList(
AbstractRandomizedTest.class, LuceneTestCase.class,
ElasticsearchIntegrationTest.class, ElasticsearchTestCase.class)));
/**
* This controls how suite-level rules are nested. It is important that _all_ rules declared
* in {@link LuceneTestCase} are executed in proper order if they depend on each
* other.
*/
@ClassRule
public static TestRule classRules = RuleChain
.outerRule(new TestRuleIgnoreTestSuites())
.around(ignoreAfterMaxFailures)
.around(suiteFailureMarker)
.around(new TestRuleAssertionsRequired())
.around(new StaticFieldsInvariantRule(STATIC_LEAK_THRESHOLD, true) {
@Override
protected boolean accept(java.lang.reflect.Field field) {
// Don't count known classes that consume memory once.
if (STATIC_LEAK_IGNORED_TYPES.contains(field.getType().getName())) {
return false;
}
// Don't count references from ourselves, we're top-level.
if (TOP_LEVEL_CLASSES.contains(field.getDeclaringClass())) {
return false;
}
return super.accept(field);
}
})
.around(new NoClassHooksShadowingRule())
.around(new NoInstanceHooksOverridesRule() {
@Override
protected boolean verify(Method key) {
String name = key.getName();
return !(name.equals("setUp") || name.equals("tearDown"));
}
})
.around(new SystemPropertiesInvariantRule(IGNORED_INVARIANT_PROPERTIES))
.around(classNameRule = new TestRuleStoreClassName())
.around(classEnvRule = new TestRuleSetupAndRestoreClassEnv());
// -----------------------------------------------------------------
// Test level rules.
// -----------------------------------------------------------------
/**
* Enforces {@link #setUp()} and {@link #tearDown()} calls are chained.
*/
private TestRuleSetupTeardownChained parentChainCallRule = new TestRuleSetupTeardownChained();
/**
* Save test thread and name.
*/
private TestRuleThreadAndTestName threadAndTestNameRule = new TestRuleThreadAndTestName();
/**
* Taint suite result with individual test failures.
*/
private TestRuleMarkFailure testFailureMarker = new TestRuleMarkFailure(suiteFailureMarker);
protected AfterTestRule afterTestRule = new AfterTestRule(afterTestTask());
/**
* This controls how individual test rules are nested. It is important that
* _all_ rules declared in {@link LuceneTestCase} are executed in proper order
* if they depend on each other.
*/
@Rule
public final TestRule ruleChain = RuleChain
.outerRule(testFailureMarker)
.around(ignoreAfterMaxFailures)
.around(threadAndTestNameRule)
.around(new SystemPropertiesInvariantRule(IGNORED_INVARIANT_PROPERTIES))
.around(new TestRuleSetupAndRestoreInstanceEnv())
.around(parentChainCallRule)
.around(afterTestRule);
// -----------------------------------------------------------------
// Suite and test case setup/ cleanup.
// -----------------------------------------------------------------
/** MockFSDirectoryService sets this: */
public static boolean checkIndexFailed;
/**
* For subclasses to override. Overrides must call {@code super.setUp()}.
*/
@Before
public void setUp() throws Exception {
parentChainCallRule.setupCalled = true;
checkIndexFailed = false;
}
/**
* For subclasses to override. Overrides must call {@code super.tearDown()}.
*/
@After
public void tearDown() throws Exception {
parentChainCallRule.teardownCalled = true;
assertFalse("at least one shard failed CheckIndex", checkIndexFailed);
}
// -----------------------------------------------------------------
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------
/**
* Registers a {@link Closeable} resource that should be closed after the test
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
@Override
public <T extends Closeable> T closeAfterTest(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.TEST);
}
/**
* Registers a {@link Closeable} resource that should be closed after the suite
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
public static <T extends Closeable> T closeAfterSuite(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.SUITE);
}
/**
* Return the current class being tested.
*/
public static Class<?> getTestClass() {
return classNameRule.getTestClass();
}
/**
* Return the name of the currently executing test case.
*/
public String getTestName() {
return threadAndTestNameRule.testMethodName;
}
protected AfterTestRule.Task afterTestTask() {
return noOpAfterRuleTask;
}
}

View File

@ -63,8 +63,8 @@ public class SloppyMathTests extends ElasticsearchTestCase {
for (int i = 0; i < 100; i++) {
// crop pole areas, since we know the function
// is not accurate around lat(89°, 90°) and lat(-90°, -89°)
final double lat2 = Math.max(-89.0, Math.min(+89.0, lat1 + (randomDouble() - 0.5) * 2 * deltaDeg[test]));
final double lon2 = lon1 + (randomDouble() - 0.5) * 2 * deltaDeg[test];
final double lat2 = Math.max(-89.0, Math.min(+89.0, lat1 + (random().nextDouble() - 0.5) * 2 * deltaDeg[test]));
final double lon2 = lon1 + (random().nextDouble() - 0.5) * 2 * deltaDeg[test];
final double accurate = GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, unit);
final double dist = GeoDistance.SLOPPY_ARC.calculate(lat1, lon1, lat2, lon2, unit);
@ -83,10 +83,10 @@ public class SloppyMathTests extends ElasticsearchTestCase {
private static final double randomLatitude() {
// crop pole areas, since we know the function
// is not accurate around lat(89°, 90°) and lat(-90°, -89°)
return (getRandom().nextDouble() - 0.5) * 178.0;
return (random().nextDouble() - 0.5) * 178.0;
}
private static final double randomLongitude() {
return (getRandom().nextDouble() - 0.5) * 360.0;
return (random().nextDouble() - 0.5) * 360.0;
}
}

View File

@ -20,9 +20,12 @@ package org.elasticsearch;
import com.google.common.base.Joiner;
import com.google.common.collect.Sets;
import junit.framework.TestCase;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTokenStreamTestCase;
import org.junit.Ignore;
@ -51,10 +54,10 @@ public class NamingConventionTests extends ElasticsearchTestCase {
String[] packages = {"org.elasticsearch", "org.apache.lucene"};
for (final String packageName : packages) {
final String path = "/" + packageName.replace('.', '/');
final Path startPath = Paths.get(NamingConventionTests.class.getResource(path).toURI());
final Set<Path> ignore = Sets.newHashSet(Paths.get("/org/elasticsearch/stresstest"), Paths.get("/org/elasticsearch/benchmark/stress"));
final Path startPath = getDataPath(path);
final Set<Path> ignore = Sets.newHashSet(PathUtils.get("/org/elasticsearch/stresstest"), PathUtils.get("/org/elasticsearch/benchmark/stress"));
Files.walkFileTree(startPath, new FileVisitor<Path>() {
private Path pkgPrefix = Paths.get(path).getParent();
private Path pkgPrefix = PathUtils.get(path).getParent();
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
Path next = pkgPrefix.resolve(dir.getFileName());
@ -101,7 +104,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
}
private boolean isTestCase(Class<?> clazz) {
return ElasticsearchTestCase.class.isAssignableFrom(clazz) || ElasticsearchLuceneTestCase.class.isAssignableFrom(clazz) || ElasticsearchTokenStreamTestCase.class.isAssignableFrom(clazz) || LuceneTestCase.class.isAssignableFrom(clazz);
return ElasticsearchTestCase.class.isAssignableFrom(clazz) || ElasticsearchTestCase.class.isAssignableFrom(clazz) || ElasticsearchTokenStreamTestCase.class.isAssignableFrom(clazz) || LuceneTestCase.class.isAssignableFrom(clazz);
}
private Class<?> loadClass(String filename) throws ClassNotFoundException {
@ -135,7 +138,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
String classesToSubclass = Joiner.on(',').join(
ElasticsearchTestCase.class.getSimpleName(),
ElasticsearchLuceneTestCase.class.getSimpleName(),
ElasticsearchTestCase.class.getSimpleName(),
ElasticsearchTokenStreamTestCase.class.getSimpleName(),
LuceneTestCase.class.getSimpleName());
assertTrue("Not all subclasses of " + ElasticsearchTestCase.class.getSimpleName() +
@ -158,7 +161,7 @@ public class NamingConventionTests extends ElasticsearchTestCase {
public static final class WrongName extends ElasticsearchTestCase {}
public static final class WrongNameTheSecond extends ElasticsearchLuceneTestCase {}
public static final class WrongNameTheSecond extends ElasticsearchTestCase {}
public static final class PlainUnit extends TestCase {}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.VersionUtils;
import org.hamcrest.Matchers;
import org.junit.Test;
@ -33,6 +34,7 @@ import java.util.Map;
import static org.elasticsearch.Version.V_0_20_0;
import static org.elasticsearch.Version.V_0_90_0;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
@ -44,7 +46,7 @@ public class VersionTests extends ElasticsearchTestCase {
// we use here is the version that is actually set to the project.version
// in maven
String property = System.getProperty("tests.version", null);
assumeNotNull(property);
assumeTrue("tests.version is set", property != null);
assertEquals(property, Version.CURRENT.toString());
}
@ -71,7 +73,7 @@ public class VersionTests extends ElasticsearchTestCase {
assertThat(Version.CURRENT.luceneVersion, equalTo(org.apache.lucene.util.Version.LATEST));
final int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
Version version = randomVersion();
Version version = randomVersion(random());
assertThat(version, sameInstance(Version.fromId(version.id)));
assertThat(version.luceneVersion, sameInstance(Version.fromId(version.id).luceneVersion));
}
@ -80,7 +82,7 @@ public class VersionTests extends ElasticsearchTestCase {
public void testCURRENTIsLatest() {
final int iters = scaledRandomIntBetween(100, 1000);
for (int i = 0; i < iters; i++) {
Version version = randomVersion();
Version version = randomVersion(random());
if (version != Version.CURRENT) {
assertThat("Version: " + version + " should be before: " + Version.CURRENT + " but wasn't", version.before(Version.CURRENT), is(true));
}
@ -90,7 +92,7 @@ public class VersionTests extends ElasticsearchTestCase {
public void testVersionFromString() {
final int iters = scaledRandomIntBetween(100, 1000);
for (int i = 0; i < iters; i++) {
Version version = randomVersion();
Version version = randomVersion(random());
if (version.snapshot()) { // number doesn't include SNAPSHOT but the parser checks for that
assertEquals(Version.fromString(version.number()), version);
} else {
@ -137,9 +139,9 @@ public class VersionTests extends ElasticsearchTestCase {
public void testParseVersion() {
final int iters = scaledRandomIntBetween(100, 1000);
for (int i = 0; i < iters; i++) {
Version version = randomVersion();
Version version = randomVersion(random());
String stringVersion = version.toString();
if (version.snapshot() == false && randomBoolean()) {
if (version.snapshot() == false && random().nextBoolean()) {
version = new Version(version.id, true, version.luceneVersion);
}
Version parsedVersion = Version.fromString(version.toString());
@ -150,7 +152,7 @@ public class VersionTests extends ElasticsearchTestCase {
public void testParseLenient() {
// note this is just a silly sanity check, we test it in lucene
for (Version version : allVersions()) {
for (Version version : VersionUtils.allVersions()) {
org.apache.lucene.util.Version luceneVersion = version.luceneVersion;
String string = luceneVersion.toString().toUpperCase(Locale.ROOT)
.replaceFirst("^LUCENE_(\\d+)_(\\d+)$", "$1.$2");

View File

@ -104,6 +104,7 @@ import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -117,6 +118,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFa
import static org.hamcrest.Matchers.*;
@ClusterScope(scope = Scope.SUITE, numClientNodes = 1)
@Slow
public class IndicesRequestTests extends ElasticsearchIntegrationTest {
private final List<String> indices = new ArrayList<>();

View File

@ -27,6 +27,7 @@ import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
public class OriginalIndicesTests extends ElasticsearchTestCase {
@ -42,7 +43,7 @@ public class OriginalIndicesTests extends ElasticsearchTestCase {
OriginalIndices originalIndices = randomOriginalIndices();
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(randomVersion());
out.setVersion(randomVersion(random()));
OriginalIndices.writeOriginalIndices(originalIndices, out);
BytesStreamInput in = new BytesStreamInput(out.bytes());

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.action.admin;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads;
import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder;
@ -44,6 +45,7 @@ import static org.hamcrest.Matchers.lessThan;
/**
*/
@Slow
public class HotThreadsTest extends ElasticsearchIntegrationTest {
@Test

View File

@ -24,6 +24,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.equalTo;
@ -42,7 +43,7 @@ public class ClusterStateRequestTest extends ElasticsearchTestCase {
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().routingTable(randomBoolean()).metaData(randomBoolean())
.nodes(randomBoolean()).blocks(randomBoolean()).indices("testindex", "testindex2").indicesOptions(indicesOptions);
Version testVersion = randomVersionBetween(Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT);
Version testVersion = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT);
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(testVersion);
clusterStateRequest.writeTo(output);

View File

@ -21,6 +21,7 @@
package org.elasticsearch.action.bulk;
import com.google.common.base.Charsets;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

View File

@ -20,6 +20,7 @@
package org.elasticsearch.action.bulk;
import com.carrotsearch.ant.tasks.junit4.dependencies.com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;

View File

@ -19,7 +19,6 @@
package org.elasticsearch.action.get;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.index.VersionType;
@ -29,6 +28,7 @@ import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
public class MultiGetShardRequestTests extends ElasticsearchTestCase {
@ -70,7 +70,7 @@ public class MultiGetShardRequestTests extends ElasticsearchTestCase {
}
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(randomVersion());
out.setVersion(randomVersion(random()));
multiGetShardRequest.writeTo(out);
BytesStreamInput in = new BytesStreamInput(out.bytes());

View File

@ -27,6 +27,7 @@ import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
public class GetIndexedScriptRequestTests extends ElasticsearchTestCase {
@ -40,7 +41,7 @@ public class GetIndexedScriptRequestTests extends ElasticsearchTestCase {
}
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(randomVersion());
out.setVersion(randomVersion(random()));
request.writeTo(out);
BytesStreamInput in = new BytesStreamInput(out.bytes());

View File

@ -19,7 +19,6 @@
package org.elasticsearch.action.mlt;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamInput;
@ -33,6 +32,7 @@ import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.*;
public class MoreLikeThisRequestTests extends ElasticsearchTestCase {
@ -99,7 +99,7 @@ public class MoreLikeThisRequestTests extends ElasticsearchTestCase {
}
BytesStreamOutput out = new BytesStreamOutput();
out.setVersion(randomVersion());
out.setVersion(randomVersion(random()));
mltRequest.writeTo(out);
BytesStreamInput in = new BytesStreamInput(out.bytes());

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.CoreMatchers.equalTo;
public class IndicesOptionsTests extends ElasticsearchTestCase {
@ -36,12 +37,12 @@ public class IndicesOptionsTests extends ElasticsearchTestCase {
IndicesOptions indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean());
BytesStreamOutput output = new BytesStreamOutput();
Version outputVersion = randomVersion();
Version outputVersion = randomVersion(random());
output.setVersion(outputVersion);
indicesOptions.writeIndicesOptions(output);
BytesStreamInput bytesStreamInput = new BytesStreamInput(output.bytes());
bytesStreamInput.setVersion(randomVersion());
bytesStreamInput.setVersion(randomVersion(random()));
IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(bytesStreamInput);
assertThat(indicesOptions2.ignoreUnavailable(), equalTo(indicesOptions.ignoreUnavailable()));

View File

@ -20,10 +20,12 @@
package org.elasticsearch.action.termvectors;
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.*;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.alias.Alias;
@ -49,6 +51,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.*;
@Slow
public class GetTermVectorsTests extends AbstractTermVectorsTests {
@Test

View File

@ -42,7 +42,7 @@ import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.Matchers;
import org.junit.Test;
@ -55,7 +55,7 @@ import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
public class TermVectorsUnitTests extends ElasticsearchLuceneTestCase {
public class TermVectorsUnitTests extends ElasticsearchTestCase {
@Test
public void streamResponse() throws Exception {

View File

@ -19,6 +19,7 @@
package org.elasticsearch.aliases;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
@ -69,6 +70,7 @@ import static org.hamcrest.Matchers.*;
/**
*
*/
@Slow
public class IndexAliasesTests extends ElasticsearchIntegrationTest {
@Test

View File

@ -25,6 +25,7 @@ import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.FSDirectory;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.SizeValue;
@ -40,7 +41,7 @@ public class LuceneUidScanBenchmark {
public static void main(String[] args) throws Exception {
FSDirectory dir = FSDirectory.open(Paths.get("work/test"));
FSDirectory dir = FSDirectory.open(PathUtils.get("work/test"));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER));
final int NUMBER_OF_THREADS = 2;

View File
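Hunks like the one above replace java.nio.file.Paths.get(...) with PathUtils.get(...), so that path creation goes through a single utility that can be pointed at a mock filesystem. A rough, simplified sketch of such a wrapper, assuming nothing about the real PathUtils beyond the get(String...) shape visible in the diff (the class below is illustrative only and, unlike the real one, does not cooperate with mock filesystems):

---------------------------------------------------------------------------
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;

public final class PathUtilsSketch {
    // The single deliberate reference to the default filesystem; everything else calls
    // get(...), which is the seam where a test framework could swap in a mock filesystem.
    private static final FileSystem DEFAULT = FileSystems.getDefault();

    private PathUtilsSketch() {}

    public static Path get(String first, String... more) {
        return DEFAULT.getPath(first, more);
    }

    public static void main(String[] args) {
        System.out.println(get("work", "test.log").toAbsolutePath());
    }
}
---------------------------------------------------------------------------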

@ -20,6 +20,7 @@ package org.elasticsearch.benchmark.fs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.unit.ByteSizeValue;
import java.nio.ByteBuffer;
@ -35,7 +36,7 @@ import java.util.Random;
public class FsAppendBenchmark {
public static void main(String[] args) throws Exception {
Path path = Paths.get("work/test.log");
Path path = PathUtils.get("work/test.log");
IOUtils.deleteFilesIgnoringExceptions(path);
int CHUNK = (int) ByteSizeValue.parseBytesSizeValue("1k").bytes();

View File

@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -150,7 +151,7 @@ public class BasicScriptBenchmark {
}
public static void writeHelperFunction() throws IOException {
try (BufferedWriter out = Files.newBufferedWriter(Paths.get("addToPlot.m"), StandardCharsets.UTF_8)) {
try (BufferedWriter out = Files.newBufferedWriter(PathUtils.get("addToPlot.m"), StandardCharsets.UTF_8)) {
out.write("function handle = addToPlot(numTerms, perDoc, color, linestyle, linewidth)\n" + "handle = line(numTerms, perDoc);\n"
+ "set(handle, 'color', color);\n" + "set(handle, 'linestyle',linestyle);\n" + "set(handle, 'LineWidth',linewidth);\n"
+ "end\n");
@ -161,7 +162,7 @@ public class BasicScriptBenchmark {
if (args.length == 0) {
return;
}
try (BufferedWriter out = Files.newBufferedWriter(Paths.get(args[0]), StandardCharsets.UTF_8)) {
try (BufferedWriter out = Files.newBufferedWriter(PathUtils.get(args[0]), StandardCharsets.UTF_8)) {
out.write("#! /usr/local/bin/octave -qf");
out.write("\n\n\n\n");
out.write("######################################\n");

View File

@ -19,8 +19,8 @@
package org.elasticsearch.bwcompat;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@ -48,24 +48,20 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.index.merge.NoMergePolicyProvider;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Before;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.*;
@ -74,21 +70,21 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@LuceneTestCase.SuppressCodecs({"Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Appending", "Lucene42", "Lucene45", "Lucene46", "Lucene49"})
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
@LuceneTestCase.Slow
public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegrationTest {
// TODO: test for proper exception on unsupported indexes (maybe via separate test?)
// We have a 0.20.6.zip etc for this.
static List<String> indexes;
List<String> indexes;
static Path singleDataPath;
static Path[] multiDataPath;
@BeforeClass
public static void initIndexesList() throws Exception {
@Before
public void initIndexesList() throws Exception {
indexes = new ArrayList<>();
URL dirUrl = OldIndexBackwardsCompatibilityTests.class.getResource(".");
Path dir = Paths.get(dirUrl.toURI());
Path dir = getDataPath(".");
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "index-*.zip")) {
for (Path path : stream) {
indexes.add(path.getFileName().toString());
@ -99,7 +95,6 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
@AfterClass
public static void tearDownStatics() {
indexes = null;
singleDataPath = null;
multiDataPath = null;
}
@ -116,7 +111,7 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
void setupCluster() throws Exception {
ListenableFuture<List<String>> replicas = internalCluster().startNodesAsync(1); // for replicas
Path baseTempDir = newTempDirPath(LifecycleScope.SUITE);
Path baseTempDir = createTempDir();
// start single data path node
ImmutableSettings.Builder nodeSettings = ImmutableSettings.builder()
.put("path.data", baseTempDir.resolve("single-path").toAbsolutePath())
@ -152,12 +147,12 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
}
String loadIndex(String indexFile) throws Exception {
Path unzipDir = newTempDirPath();
Path unzipDir = createTempDir();
Path unzipDataDir = unzipDir.resolve("data");
String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT);
// decompress the index
Path backwardsIndex = Paths.get(getClass().getResource(indexFile).toURI());
Path backwardsIndex = getDataPath(indexFile);
try (InputStream stream = Files.newInputStream(backwardsIndex)) {
TestUtil.unzip(stream, unzipDir);
}
@ -230,21 +225,11 @@ public class OldIndexBackwardsCompatibilityTests extends ElasticsearchIntegratio
public void testAllVersionsTested() throws Exception {
SortedSet<String> expectedVersions = new TreeSet<>();
for (java.lang.reflect.Field field : Version.class.getDeclaredFields()) {
if (Modifier.isStatic(field.getModifiers()) && field.getType() == Version.class) {
Version v = (Version) field.get(Version.class);
if (v.snapshot()) {
continue; // snapshots are unreleased, so there is no backcompat yet
}
if (v.onOrBefore(Version.V_0_20_6)) {
continue; // we can only test back one major lucene version
}
if (v.equals(Version.CURRENT)) {
continue; // the current version is always compatible with itself
}
expectedVersions.add("index-" + v.toString() + ".zip");
}
for (Version v : VersionUtils.allVersions()) {
if (v.snapshot()) continue; // snapshots are unreleased, so there is no backcompat yet
if (v.onOrBefore(Version.V_0_20_6)) continue; // we can only test back one major lucene version
if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
expectedVersions.add("index-" + v.toString() + ".zip");
}
for (String index : indexes) {

View File
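Besides switching to getDataPath(...) and createTempDir(), the hunk above retires a reflection scan over Version's static fields in favour of VersionUtils.allVersions(), keeping the filtering rules as inline comments. For reference, the retired pattern looks roughly like this JDK-only sketch (Boolean is used simply because it declares constants of its own type):

---------------------------------------------------------------------------
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;

public class ConstantScanSketch {
    // Collect every static constant of the given type declared on the holder class.
    static <T> List<T> constantsOfType(Class<?> holder, Class<T> type) throws IllegalAccessException {
        List<T> result = new ArrayList<>();
        for (Field field : holder.getDeclaredFields()) {
            if (Modifier.isStatic(field.getModifiers()) && field.getType() == type) {
                result.add(type.cast(field.get(null)));
            }
        }
        return result;
    }

    public static void main(String[] args) throws Exception {
        // Typically prints [true, false]
        System.out.println(constantsOfType(Boolean.class, Boolean.class));
    }
}
---------------------------------------------------------------------------

A precomputed list, as VersionUtils provides, avoids re-reflecting in every test and makes the question of which versions are covered answerable in one place.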

@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.AbstractSnapshotTests;
@ -92,9 +93,9 @@ public class RestoreBackwardsCompatTests extends AbstractSnapshotTests {
}
}
public static List<String> repoVersions() throws Exception {
private List<String> repoVersions() throws Exception {
List<String> repoVersions = newArrayList();
Path repoFiles = Paths.get(RestoreBackwardsCompatTests.class.getResource(".").toURI());
Path repoFiles = getDataPath(".");
try (DirectoryStream<Path> stream = Files.newDirectoryStream(repoFiles, "repo-*.zip")) {
for (Path entry : stream) {
String fileName = entry.getFileName().toString();

View File

@ -33,13 +33,13 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
* These tests are against static indexes, built from versions of ES that cannot be upgraded without
* a full cluster restart (ie no wire format compatibility).
*/
@LuceneTestCase.SuppressCodecs({"Lucene3x", "MockFixedIntBlock", "MockVariableIntBlock", "MockSep", "MockRandom", "Lucene40", "Lucene41", "Appending", "Lucene42", "Lucene45", "Lucene46", "Lucene49"})
@LuceneTestCase.SuppressCodecs("*")
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0, minNumDataNodes = 0, maxNumDataNodes = 0)
public class StaticIndexBackwardCompatibilityTest extends ElasticsearchIntegrationTest {
public void loadIndex(String index, Object... settings) throws Exception {
logger.info("Checking static index " + index);
Settings nodeSettings = prepareBackwardsDataDir(Paths.get(getClass().getResource(index + ".zip").toURI()), settings);
Settings nodeSettings = prepareBackwardsDataDir(getDataPath(index + ".zip"), settings);
internalCluster().startNode(nodeSettings);
ensureGreen(index);
assertIndexSanity(index);

View File
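The hunk above follows the same resource-loading cleanup applied across the test suite: bundled fixtures are resolved with getDataPath(...) from the Lucene test framework rather than getClass().getResource(...).toURI() plus Paths.get, and the codec suppression list collapses to SuppressCodecs("*"). The sketch below is only a simplified stand-in for the resource-to-Path step (the real helper also plays nicely with mock filesystems, which this one does not):

---------------------------------------------------------------------------
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DataPathSketch {
    // Resolve a classpath resource next to the given class to a filesystem Path,
    // failing loudly when the fixture is missing.
    static Path dataPath(Class<?> owner, String resource) {
        URL url = owner.getResource(resource);
        if (url == null) {
            throw new IllegalArgumentException("test fixture not found: " + resource);
        }
        try {
            return Paths.get(url.toURI());
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("cannot convert to a path: " + url, e);
        }
    }

    public static void main(String[] args) {
        // Demo: resolve this class's own .class file, which is always on the classpath
        // when run from an exploded directory.
        System.out.println(dataPath(DataPathSketch.class, "DataPathSketch.class"));
    }
}
---------------------------------------------------------------------------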

@ -54,7 +54,7 @@ public class TransportClientTests extends ElasticsearchIntegrationTest {
TransportClientNodesService nodeService = client.nodeService();
Node node = nodeBuilder().data(false).settings(ImmutableSettings.builder()
.put(internalCluster().getDefaultSettings())
.put("path.home", newTempDirPath())
.put("path.home", createTempDir())
.put("node.name", "testNodeVersionIsUpdated")
.put("http.enabled", false)
.put("index.store.type", "ram")

View File

@ -20,6 +20,7 @@ package org.elasticsearch.cluster;
import com.google.common.base.Predicate;
import com.google.common.util.concurrent.ListenableFuture;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
@ -602,7 +603,7 @@ public class ClusterServiceTests extends ElasticsearchIntegrationTest {
block2.countDown();
}
@Test
@Test @Slow
public void testLocalNodeMasterListenerCallbacks() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")

View File

@ -20,6 +20,7 @@
package org.elasticsearch.cluster;
import com.google.common.base.Predicate;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Client;
@ -169,7 +170,7 @@ public class MinimumMasterNodesTests extends ElasticsearchIntegrationTest {
}
}
@Test
@Test @Slow
public void multipleNodesShutdownNonMasterNodes() throws Exception {
Settings settings = settingsBuilder()
.put("discovery.type", "zen")

View File

@ -37,6 +37,7 @@ import static org.hamcrest.Matchers.*;
*
*/
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
@Slow
public class SpecificMasterNodesTests extends ElasticsearchIntegrationTest {
protected final ImmutableSettings.Builder settingsBuilder() {

View File

@ -33,6 +33,8 @@ import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.test.VersionUtils.randomVersion;
public class DiscoveryNodeTests extends ElasticsearchTestCase {
@ -49,7 +51,7 @@ public class DiscoveryNodeTests extends ElasticsearchTestCase {
for (int a = randomInt(10); a > 0; a--) {
attributes.put(randomUnicodeOfLengthBetween(3, 20), randomUnicodeOfLengthBetween(3, 20));
}
final Version version = randomVersion();
final Version version = randomVersion(random());
DiscoveryNode discoveryNode = new DiscoveryNode(nodeName, id, hostName, hostAddress, transportAddress, attributes, version);
BytesStreamOutput bytesOutput = new BytesStreamOutput();
ThrowableObjectOutputStream too = new ThrowableObjectOutputStream(bytesOutput);

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.BufferedReader;
import java.io.InputStreamReader;
@ -38,7 +39,7 @@ import java.util.Arrays;
public class RoutingBackwardCompatibilityTests extends ElasticsearchTestCase {
public void testBackwardCompatibility() throws Exception {
Path baseDir = newTempDirPath();
Path baseDir = createTempDir();
Node node = new Node(ImmutableSettings.builder().put("path.home", baseDir.toString()).build(), false);
try {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(RoutingBackwardCompatibilityTests.class.getResourceAsStream("/org/elasticsearch/cluster/routing/shard_routes.txt"), "UTF-8"))) {
@ -57,7 +58,7 @@ public class RoutingBackwardCompatibilityTests extends ElasticsearchTestCase {
final int currentExpectedShard = Integer.parseInt(parts[6]);
OperationRouting operationRouting = node.injector().getInstance(OperationRouting.class);
for (Version version : allVersions()) {
for (Version version : VersionUtils.allVersions()) {
final Settings settings = settings(version).build();
IndexMetaData indexMetaData = IndexMetaData.builder(index).settings(settings).numberOfShards(numberOfShards).numberOfReplicas(randomInt(3)).build();
MetaData.Builder metaData = MetaData.builder().put(indexMetaData, false);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.get.GetResponse;
@ -31,12 +32,12 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0, minNumDataNodes = 0, maxNumDataNodes = 0)
@LuceneTestCase.SuppressFileSystems("*") // extra files break the single data cluster expectation when unzipping the static index
public class RoutingBackwardCompatibilityUponUpgradeTests extends ElasticsearchIntegrationTest {
public void testDefaultRouting() throws Exception {
@ -48,7 +49,7 @@ public class RoutingBackwardCompatibilityUponUpgradeTests extends ElasticsearchI
}
private void test(String name, Class<? extends HashFunction> expectedHashFunction, boolean expectedUseType) throws Exception {
Path zippedIndexDir = Paths.get(getClass().getResource("/org/elasticsearch/cluster/routing/" + name + ".zip").toURI());
Path zippedIndexDir = getDataPath("/org/elasticsearch/cluster/routing/" + name + ".zip");
Settings baseSettings = prepareBackwardsDataDir(zippedIndexDir);
internalCluster().startNode(ImmutableSettings.builder()
.put(baseSettings)

View File

@ -19,6 +19,7 @@
package org.elasticsearch.cluster.routing.allocation;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -40,12 +41,13 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilde
/**
* see issue #9023
*/
@Slow
public class BalanceUnbalancedClusterTest extends CatAllocationTestBase {
@Override
protected Path getCatPath() throws IOException {
Path tmp = newTempDirPath();
try (InputStream stream = Files.newInputStream(getResourcePath("/org/elasticsearch/cluster/routing/issue_9023.zip"))) {
Path tmp = createTempDir();
try (InputStream stream = Files.newInputStream(getDataPath("/org/elasticsearch/cluster/routing/issue_9023.zip"))) {
TestUtil.unzip(stream, tmp);
}
return tmp.resolve("issue_9023");

View File

@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ElasticsearchAllocationTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.Test;
import java.util.ArrayList;
@ -41,6 +42,7 @@ import java.util.List;
import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.*;
/**
@ -122,7 +124,7 @@ public class NodeVersionAllocationDeciderTests extends ElasticsearchAllocationTe
}
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.put(newNode("node3", getPreviousVersion())))
.put(newNode("node3", VersionUtils.getPreviousVersion())))
.build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
@ -202,9 +204,9 @@ public class NodeVersionAllocationDeciderTests extends ElasticsearchAllocationTe
} else {
for (int j = nodes.size(); j < numNodes; j++) {
if (frequently()) {
nodes.add(newNode("node" + (nodeIdx++), randomBoolean() ? getPreviousVersion() : Version.CURRENT));
nodes.add(newNode("node" + (nodeIdx++), randomBoolean() ? VersionUtils.getPreviousVersion() : Version.CURRENT));
} else {
nodes.add(newNode("node" + (nodeIdx++), randomVersion()));
nodes.add(newNode("node" + (nodeIdx++), randomVersion(random())));
}
}
}
@ -247,20 +249,20 @@ public class NodeVersionAllocationDeciderTests extends ElasticsearchAllocationTe
assertThat(routingTable.index("test").shard(i).shards().get(2).currentNodeId(), nullValue());
}
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("old0", getPreviousVersion()))
.put(newNode("old1", getPreviousVersion()))
.put(newNode("old2", getPreviousVersion()))).build();
.put(newNode("old0", VersionUtils.getPreviousVersion()))
.put(newNode("old1", VersionUtils.getPreviousVersion()))
.put(newNode("old2", VersionUtils.getPreviousVersion()))).build();
clusterState = stabilize(clusterState, service);
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("old0", getPreviousVersion()))
.put(newNode("old1", getPreviousVersion()))
.put(newNode("old0", VersionUtils.getPreviousVersion()))
.put(newNode("old1", VersionUtils.getPreviousVersion()))
.put(newNode("new0"))).build();
clusterState = stabilize(clusterState, service);
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("node0", getPreviousVersion()))
.put(newNode("node0", VersionUtils.getPreviousVersion()))
.put(newNode("new1"))
.put(newNode("new0"))).build();

View File

@ -20,6 +20,7 @@
package org.elasticsearch.codecs;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -28,6 +29,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.elasticsearch.test.VersionUtils;
import org.junit.Assert;
import java.io.IOException;
@ -36,6 +38,7 @@ import static org.hamcrest.Matchers.containsString;
/**
*/
@Slow
public class CodecTests extends ElasticsearchSingleNodeTest {
public void testAcceptPostingsFormat() throws IOException {
@ -43,7 +46,7 @@ public class CodecTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("field").field("type", "string").field("postings_format", Codec.getDefault().postingsFormat().getName()).endObject().endObject()
.endObject().endObject().string();
int i = 0;
for (Version v : allVersions()) {
for (Version v : VersionUtils.allVersions()) {
IndexService indexService = createIndex("test-" + i++, ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {
@ -66,7 +69,7 @@ public class CodecTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("field").field("type", "string").field("doc_values_format", Codec.getDefault().docValuesFormat().getName()).endObject().endObject()
.endObject().endObject().string();
int i = 0;
for (Version v : allVersions()) {
for (Version v : VersionUtils.allVersions()) {
IndexService indexService = createIndex("test-" + i++, ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build());
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
try {

View File

@ -53,7 +53,7 @@ public class ChannelsTests extends ElasticsearchTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
Path tmpFile = newTempFilePath();
Path tmpFile = createTempFile();
FileChannel randomAccessFile = FileChannel.open(tmpFile, StandardOpenOption.READ, StandardOpenOption.WRITE);
fileChannel = new MockFileChannel(randomAccessFile);
randomBytes = randomUnicodeOfLength(scaledRandomIntBetween(10, 100000)).getBytes("UTF-8");

View File

@ -37,7 +37,7 @@ public class PidFileTests extends ElasticsearchTestCase {
@Test(expected = ElasticsearchIllegalArgumentException.class)
public void testParentIsFile() throws IOException {
Path dir = newTempDirPath();
Path dir = createTempDir();
Path parent = dir.resolve("foo");
try(BufferedWriter stream = Files.newBufferedWriter(parent, Charsets.UTF_8, StandardOpenOption.CREATE_NEW)) {
stream.write("foo");
@ -48,7 +48,7 @@ public class PidFileTests extends ElasticsearchTestCase {
@Test
public void testPidFile() throws IOException {
Path dir = newTempDirPath();
Path dir = createTempDir();
Path parent = dir.resolve("foo");
if (randomBoolean()) {
Files.createDirectories(parent);

View File

@ -18,10 +18,10 @@
*/
package org.elasticsearch.common.blobstore;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.blobstore.fs.FsBlobStore;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
@ -41,6 +41,7 @@ import static com.google.common.collect.Maps.newHashMap;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
@LuceneTestCase.SuppressFileSystems("ExtrasFS")
public class BlobStoreTest extends ElasticsearchTestCase {
@Test
@ -140,7 +141,7 @@ public class BlobStoreTest extends ElasticsearchTestCase {
}
protected BlobStore newBlobStore() throws IOException {
Path tempDir = newTempDirPath(LifecycleScope.TEST);
Path tempDir = createTempDir();
Settings settings = randomBoolean() ? ImmutableSettings.EMPTY : ImmutableSettings.builder().put("buffer_size", new ByteSizeValue(randomIntBetween(1, 100), ByteSizeUnit.KB)).build();
FsBlobStore store = new FsBlobStore(settings, tempDir);
return store;

View File
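The hunk above drops the LifecycleScope-flavoured newTempDirPath(...) in favour of createTempDir(), letting the test framework own creation and cleanup, and opts the class out of the ExtrasFS mock filesystem. A JDK-only approximation of the create-then-always-clean-up part (the framework does this, plus its own leftover checks, automatically):

---------------------------------------------------------------------------
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

public class TempDirSketch {
    public static void main(String[] args) throws IOException {
        // Create an isolated per-run directory...
        Path tempDir = Files.createTempDirectory("blobstore-test-");
        try {
            Path blob = tempDir.resolve("blob");
            Files.write(blob, new byte[] {1, 2, 3});
            System.out.println("wrote " + Files.size(blob) + " bytes under " + tempDir);
        } finally {
            // ...and always clean it up, deleting children before parents.
            try (Stream<Path> paths = Files.walk(tempDir)) {
                paths.sorted(Comparator.reverseOrder()).forEach(p -> p.toFile().delete());
            }
        }
    }
}
---------------------------------------------------------------------------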

@ -263,7 +263,7 @@ public class PagedBytesReferenceTest extends ElasticsearchTestCase {
public void testWriteToChannel() throws IOException {
int length = randomIntBetween(10, PAGE_SIZE * 4);
BytesReference pbr = getRandomizedPagedBytesReference(length);
Path tFile = newTempFilePath();
Path tFile = createTempFile();
try (FileChannel channel = FileChannel.open(tFile, StandardOpenOption.WRITE)) {
pbr.writeTo(channel);
assertEquals(pbr.length(), channel.position());
@ -290,7 +290,7 @@ public class PagedBytesReferenceTest extends ElasticsearchTestCase {
int sliceOffset = randomIntBetween(1, length / 2);
int sliceLength = length - sliceOffset;
BytesReference slice = pbr.slice(sliceOffset, sliceLength);
Path tFile = newTempFilePath();
Path tFile = createTempFile();
try (FileChannel channel = FileChannel.open(tFile, StandardOpenOption.WRITE)) {
slice.writeTo(channel);
assertEquals(slice.length(), channel.position());

View File

@ -32,7 +32,9 @@ public class HppcMapsTests extends ElasticsearchTestCase {
@Test
public void testIntersection() throws Exception {
assumeTrue(ASSERTIONS_ENABLED);
boolean enabled = false;
assert enabled = true;
assumeTrue("assertions enabled", enabled);
ObjectOpenHashSet<String> set1 = ObjectOpenHashSet.from("1", "2", "3");
ObjectOpenHashSet<String> set2 = ObjectOpenHashSet.from("1", "2", "3");
List<String> values = toList(HppcMaps.intersection(set1, set2));

View File
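The replacement above for the old ASSERTIONS_ENABLED flag is a self-contained idiom: an assignment used as an assert condition only executes when assertions are enabled, so the local's value reveals whether -ea was passed. Stand-alone it looks like this:

---------------------------------------------------------------------------
public class AssertionsEnabledSketch {
    public static void main(String[] args) {
        boolean enabled = false;
        // The assignment is the assert condition, so it only runs under -ea;
        // it assigns true, so the assert itself can never fail.
        assert enabled = true;
        System.out.println("assertions enabled: " + enabled);
    }
}
---------------------------------------------------------------------------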

@ -20,16 +20,18 @@
package org.elasticsearch.common.io;
import com.google.common.base.Charsets;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists;
@ -39,6 +41,7 @@ import static org.hamcrest.CoreMatchers.is;
/**
* Unit tests for {@link org.elasticsearch.common.io.FileSystemUtils}.
*/
@SuppressFileSystems("WindowsFS") // tries to move away open file handles
public class FileSystemUtilsTests extends ElasticsearchTestCase {
private Path src;
@ -46,14 +49,14 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
@Before
public void copySourceFilesToTarget() throws IOException, URISyntaxException {
src = newTempDirPath();
dst = newTempDirPath();
src = createTempDir();
dst = createTempDir();
Files.createDirectories(src);
Files.createDirectories(dst);
// We first copy sources test files from src/test/resources
// Because after when the test runs, src files are moved to their destination
final Path path = Paths.get(FileSystemUtilsTests.class.getResource("/org/elasticsearch/common/io/copyappend").toURI());
final Path path = getDataPath("/org/elasticsearch/common/io/copyappend");
FileSystemUtils.copyDirectoryRecursively(path, src);
}
@ -86,7 +89,7 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
@Test
public void testMoveOverExistingFileAndIgnore() throws IOException {
Path dest = newTempDirPath();
Path dest = createTempDir();
FileSystemUtils.moveFilesWithoutOverwriting(src.resolve("v1"), dest, null);
assertFileContent(dest, "file1.txt", "version1");
@ -114,7 +117,7 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
@Test
public void testMoveFilesDoesNotCreateSameFileWithSuffix() throws Exception {
Path[] dirs = new Path[] { newTempDirPath(), newTempDirPath(), newTempDirPath()};
Path[] dirs = new Path[] { createTempDir(), createTempDir(), createTempDir()};
for (Path dir : dirs) {
Files.write(dir.resolve("file1.txt"), "file1".getBytes(Charsets.UTF_8));
Files.createDirectory(dir.resolve("dir"));
@ -153,7 +156,7 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
Assert.assertThat("file [" + file + "] should not exist.", Files.exists(file), is(false));
} else {
assertFileExists(file);
String fileContent = new String(Files.readAllBytes(file), UTF8);
String fileContent = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
// trim the string content to prevent different handling on windows vs. unix and CR chars...
Assert.assertThat(fileContent.trim(), equalTo(expected.trim()));
}
@ -161,13 +164,13 @@ public class FileSystemUtilsTests extends ElasticsearchTestCase {
@Test
public void testAppend() {
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 0),
Paths.get("/foo/bar/hello/world/this_is/awesome"));
assertEquals(FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 0),
PathUtils.get("/foo/bar/hello/world/this_is/awesome"));
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 2),
Paths.get("/foo/bar/this_is/awesome"));
assertEquals(FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 2),
PathUtils.get("/foo/bar/this_is/awesome"));
assertEquals(FileSystemUtils.append(Paths.get("/foo/bar"), Paths.get("/hello/world/this_is/awesome"), 1),
Paths.get("/foo/bar/world/this_is/awesome"));
assertEquals(FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 1),
PathUtils.get("/foo/bar/world/this_is/awesome"));
}
}

View File
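The hunk above tags FileSystemUtilsTests with @SuppressFileSystems("WindowsFS") because it moves files it may still hold handles on, which that mock filesystem rejects by design, and it swaps the resource and temp-dir plumbing for getDataPath(...) and createTempDir(). A sketch of the same opt-out on a hypothetical test class, assuming the Lucene test framework is on the test classpath (class and method names are invented):

---------------------------------------------------------------------------
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;

// WindowsFS emulates Windows semantics where open files cannot be moved or deleted,
// so a class that legitimately does this opts out of that one mock filesystem.
@SuppressFileSystems("WindowsFS")
public class FileMoveSketchTests extends LuceneTestCase {

    public void testMoveWhileStreamIsOpen() throws Exception {
        Path dir = createTempDir();                                    // framework-managed temp dir
        Path file = Files.write(dir.resolve("a.txt"), new byte[] {42});
        try (InputStream in = Files.newInputStream(file)) {
            Files.move(file, dir.resolve("b.txt"));                    // WindowsFS would likely refuse this
            assertEquals(42, in.read());
        }
    }
}
---------------------------------------------------------------------------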

@ -260,7 +260,7 @@ public class BytesStreamsTests extends ElasticsearchTestCase {
@Test
public void testSimpleStreams() throws Exception {
assumeTrue(Constants.JRE_IS_64BIT);
assumeTrue("requires a 64-bit JRE ... ?!", Constants.JRE_IS_64BIT);
BytesStreamOutput out = new BytesStreamOutput();
out.writeBoolean(false);
out.writeByte((byte)1);

View File

@ -24,6 +24,7 @@ import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LocationInfo;
import org.apache.log4j.spi.LoggingEvent;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
@ -51,7 +52,7 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase {
super.setUp();
this.testLevel = Log4jESLoggerFactory.getLogger("test").getLevel();
LogConfigurator.reset();
Path configDir = resolveConfigDir();
Path configDir = getDataPath("config");
// Need to set custom path.conf so we can use a custom logging.yml file for the test
Settings settings = ImmutableSettings.builder()
.put("path.conf", configDir.toAbsolutePath())
@ -127,11 +128,6 @@ public class Log4jESLoggerTests extends ElasticsearchTestCase {
}
private static Path resolveConfigDir() throws Exception {
URL url = Log4jESLoggerTests.class.getResource("config");
return Paths.get(url.toURI());
}
private static class TestAppender extends AppenderSkeleton {
private List<LoggingEvent> events = new ArrayList<>();

View File

@ -21,6 +21,7 @@ package org.elasticsearch.common.logging.log4j;
import org.apache.log4j.Appender;
import org.apache.log4j.Logger;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -55,7 +56,7 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
public void testResolveMultipleConfigs() throws Exception {
String level = Log4jESLoggerFactory.getLogger("test").getLevel();
try {
Path configDir = resolveConfigDir();
Path configDir = getDataPath("config");
Settings settings = ImmutableSettings.builder()
.put("path.conf", configDir.toAbsolutePath())
.build();
@ -82,7 +83,7 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
@Test
public void testResolveJsonLoggingConfig() throws Exception {
Path tmpDir = newTempDirPath();
Path tmpDir = createTempDir();
Path loggingConf = tmpDir.resolve(loggingConfiguration("json"));
Files.write(loggingConf, "{\"json\": \"foo\"}".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
@ -97,7 +98,7 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
@Test
public void testResolvePropertiesLoggingConfig() throws Exception {
Path tmpDir = newTempDirPath();
Path tmpDir = createTempDir();
Path loggingConf = tmpDir.resolve(loggingConfiguration("properties"));
Files.write(loggingConf, "key: value".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
@ -112,7 +113,7 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
@Test
public void testResolveYamlLoggingConfig() throws Exception {
Path tmpDir = newTempDirPath();
Path tmpDir = createTempDir();
Path loggingConf1 = tmpDir.resolve(loggingConfiguration("yml"));
Path loggingConf2 = tmpDir.resolve(loggingConfiguration("yaml"));
Files.write(loggingConf1, "yml: bar".getBytes(StandardCharsets.UTF_8));
@ -130,7 +131,7 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
@Test
public void testResolveConfigInvalidFilename() throws Exception {
Path tmpDir = newTempDirPath();
Path tmpDir = createTempDir();
Path invalidSuffix = tmpDir.resolve(loggingConfiguration(randomFrom(LogConfigurator.ALLOWED_SUFFIXES)) + randomInvalidSuffix());
Files.write(invalidSuffix, "yml: bar".getBytes(StandardCharsets.UTF_8));
Environment environment = new Environment(
@ -143,11 +144,6 @@ public class LoggingConfigurationTests extends ElasticsearchTestCase {
assertThat(logSettings.get("yml"), Matchers.nullValue());
}
private static Path resolveConfigDir() throws Exception {
URL url = LoggingConfigurationTests.class.getResource("config");
return Paths.get(url.toURI());
}
private static String loggingConfiguration(String suffix) {
return "logging." + randomAsciiOfLength(randomIntBetween(0, 10)) + "." + suffix;
}

View File

@ -26,7 +26,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Version;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.io.IOException;
@ -36,7 +36,7 @@ import java.util.Set;
/**
*
*/
public class LuceneTest extends ElasticsearchLuceneTestCase {
public class LuceneTest extends ElasticsearchTestCase {
/*

View File

@ -31,10 +31,10 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
/** Simple tests for this filterreader */
public class ElasticsearchDirectoryReaderTests extends ElasticsearchLuceneTestCase {
public class ElasticsearchDirectoryReaderTests extends ElasticsearchTestCase {
/** Test that core cache key (needed for NRT) is working */
public void testCoreCacheKey() throws Exception {

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.lucene.index;
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.collect.Lists;
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -35,7 +36,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -48,7 +49,7 @@ import static org.hamcrest.Matchers.is;
/**
*/
public class FreqTermsEnumTests extends ElasticsearchLuceneTestCase {
public class FreqTermsEnumTests extends ElasticsearchTestCase {
private String[] terms;
private IndexWriter iw;

View File

@ -28,9 +28,9 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.docset.AndDocIdSet;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
public class AndDocIdSetTests extends ElasticsearchLuceneTestCase {
public class AndDocIdSetTests extends ElasticsearchTestCase {
private static FixedBitSet randomBitSet(int numDocs) {
FixedBitSet b = new FixedBitSet(numDocs);

View File

@ -32,8 +32,9 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -48,7 +49,7 @@ import static org.hamcrest.core.IsEqual.equalTo;
/**
*/
public class XBooleanFilterTests extends ElasticsearchLuceneTestCase {
public class XBooleanFilterTests extends ElasticsearchTestCase {
private Directory directory;
private LeafReader reader;

View File

@ -19,6 +19,7 @@
package org.elasticsearch.common.lucene.uid;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
@ -27,7 +28,6 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.*;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Numbers;
@ -35,7 +35,7 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.merge.policy.ElasticsearchMergePolicy;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.hamcrest.MatcherAssert;
import org.junit.Test;
@ -46,7 +46,7 @@ import java.util.Map;
import static org.hamcrest.Matchers.*;
public class VersionsTests extends ElasticsearchLuceneTestCase {
public class VersionsTests extends ElasticsearchTestCase {
public static DirectoryReader reopen(DirectoryReader reader) throws IOException {
return reopen(reader, true);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.deleteByQuery;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.action.ActionWriteResponse;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequestBuilder;
@ -38,6 +39,7 @@ import java.util.concurrent.ExecutionException;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
@Slow
public class DeleteByQueryTests extends ElasticsearchIntegrationTest {
@Test

View File

@ -19,6 +19,7 @@
package org.elasticsearch.discovery;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -36,6 +37,7 @@ import java.util.concurrent.ExecutionException;
import static org.hamcrest.Matchers.equalTo;
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
@Slow
public class ZenUnicastDiscoveryTests extends ElasticsearchIntegrationTest {
private ClusterDiscoveryConfiguration discoveryConfig;

View File

@ -19,6 +19,7 @@
package org.elasticsearch.discovery.zen;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
@ -59,6 +60,7 @@ import static org.hamcrest.Matchers.*;
/**
*/
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST, numDataNodes = 0, numClientNodes = 0)
@Slow
public class ZenDiscoveryTests extends ElasticsearchIntegrationTest {
@Test

View File

@ -37,6 +37,7 @@ import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.netty.NettyTransport;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.junit.Test;
import static org.hamcrest.Matchers.equalTo;
@ -44,6 +45,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*
*/
@Slow
public class UnicastZenPingTests extends ElasticsearchTestCase {
@Test

View File

@ -42,7 +42,7 @@ public class EnvironmentTests extends ElasticsearchTestCase {
public Environment newEnvironment(Settings settings) throws IOException {
Settings build = ImmutableSettings.builder()
.put(settings)
.put("path.home", newTempDirPath().toAbsolutePath())
.put("path.home", createTempDir().toAbsolutePath())
.putArray("path.data", tmpPaths()).build();
return new Environment(build);
}

View File

@ -20,8 +20,10 @@ package org.elasticsearch.env;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@ -45,6 +47,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.CoreMatchers.equalTo;
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to allow extras
public class NodeEnvironmentTests extends ElasticsearchTestCase {
private final Settings idxSettings = ImmutableSettings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).build();
@ -69,7 +72,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
assertEquals(env.nodeDataPaths().length, dataPaths.length);
for (int i = 0; i < dataPaths.length; i++) {
assertTrue(env.nodeDataPaths()[i].startsWith(Paths.get(dataPaths[i])));
assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths[i])));
}
env.close();
assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty());
@ -312,7 +315,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
assertTrue("settings with path_data should have a custom data path", NodeEnvironment.hasCustomDataPath(s2));
assertThat(env.shardDataPaths(sid, s1), equalTo(env.shardPaths(sid)));
assertThat(env.shardDataPaths(sid, s2), equalTo(new Path[] {Paths.get("/tmp/foo/0/myindex/0")}));
assertThat(env.shardDataPaths(sid, s2), equalTo(new Path[] {PathUtils.get("/tmp/foo/0/myindex/0")}));
assertThat("shard paths with a custom data_path should contain only regular paths",
env.shardPaths(sid),
@ -326,7 +329,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
ImmutableSettings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, false).build());
assertThat(env2.shardDataPaths(sid, s1), equalTo(env2.shardPaths(sid)));
assertThat(env2.shardDataPaths(sid, s2), equalTo(new Path[] {Paths.get("/tmp/foo/myindex/0")}));
assertThat(env2.shardDataPaths(sid, s2), equalTo(new Path[] {PathUtils.get("/tmp/foo/myindex/0")}));
assertThat("shard paths with a custom data_path should contain only regular paths",
env2.shardPaths(sid),
@ -342,7 +345,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
private Path[] stringsToPaths(String[] strings, String additional) {
Path[] locations = new Path[strings.length];
for (int i = 0; i < strings.length; i++) {
locations[i] = Paths.get(strings[i], additional);
locations[i] = PathUtils.get(strings[i], additional);
}
return locations;
}
@ -352,7 +355,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
final int numPaths = randomIntBetween(1, 3);
final String[] absPaths = new String[numPaths];
for (int i = 0; i < numPaths; i++) {
absPaths[i] = newTempDirPath().toAbsolutePath().toString();
absPaths[i] = createTempDir().toAbsolutePath().toString();
}
return absPaths;
}
@ -366,7 +369,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException {
Settings build = ImmutableSettings.builder()
.put(settings)
.put("path.home", newTempDirPath().toAbsolutePath().toString())
.put("path.home", createTempDir().toAbsolutePath().toString())
.put(NodeEnvironment.SETTING_CUSTOM_DATA_PATH_ENABLED, true)
.putArray("path.data", tmpPaths()).build();
return new NodeEnvironment(build, new Environment(build));
@ -375,7 +378,7 @@ public class NodeEnvironmentTests extends ElasticsearchTestCase {
public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) throws IOException {
Settings build = ImmutableSettings.builder()
.put(settings)
.put("path.home", newTempDirPath().toAbsolutePath().toString())
.put("path.home", createTempDir().toAbsolutePath().toString())
.put(NodeEnvironment.SETTING_CUSTOM_DATA_PATH_ENABLED, true)
.putArray("path.data", dataPaths).build();
return new NodeEnvironment(build, new Environment(build));

View File

@ -19,6 +19,7 @@
package org.elasticsearch.gateway;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.gateway;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.google.common.collect.Iterators;
import org.apache.lucene.codecs.CodecUtil;
@ -29,6 +28,7 @@ import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestRuleMarkFailure;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
@ -72,6 +72,7 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS
public class MetaDataStateFormatTest extends ElasticsearchTestCase {
@ -91,7 +92,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
return MetaData.Builder.fromXContent(parser);
}
};
Path tmp = newTempDirPath();
Path tmp = createTempDir();
final InputStream resource = this.getClass().getResourceAsStream("global-3.st");
assertThat(resource, notNullValue());
Path dst = tmp.resolve("global-3.st");
@ -105,7 +106,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
public void testReadWriteState() throws IOException {
Path[] dirs = new Path[randomIntBetween(1, 5)];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newTempDirPath(LifecycleScope.TEST);
dirs[i] = createTempDir();
}
final long id = addDummyFiles("foo-", dirs);
Format format = new Format(randomFrom(XContentType.values()), "foo-");
@ -147,7 +148,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
public void testVersionMismatch() throws IOException {
Path[] dirs = new Path[randomIntBetween(1, 5)];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newTempDirPath(LifecycleScope.TEST);
dirs[i] = createTempDir();
}
final long id = addDummyFiles("foo-", dirs);
@ -172,7 +173,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
public void testCorruption() throws IOException {
Path[] dirs = new Path[randomIntBetween(1, 5)];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newTempDirPath(LifecycleScope.TEST);
dirs[i] = createTempDir();
}
final long id = addDummyFiles("foo-", dirs);
Format format = new Format(randomFrom(XContentType.values()), "foo-");
@ -246,8 +247,8 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
final ToXContent.Params params = ToXContent.EMPTY_PARAMS;
MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
final Path[] dirs = new Path[2];
dirs[0] = newTempDirPath(LifecycleScope.TEST);
dirs[1] = newTempDirPath(LifecycleScope.TEST);
dirs[0] = createTempDir();
dirs[1] = createTempDir();
for (Path dir : dirs) {
Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME));
}
@ -291,8 +292,8 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
final ToXContent.Params params = ToXContent.EMPTY_PARAMS;
MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
final Path[] dirs = new Path[2];
dirs[0] = newTempDirPath(LifecycleScope.TEST);
dirs[1] = newTempDirPath(LifecycleScope.TEST);
dirs[0] = createTempDir();
dirs[1] = createTempDir();
for (Path dir : dirs) {
Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME));
}
@ -333,7 +334,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
Set<Path> corruptedFiles = new HashSet<>();
MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newTempDirPath(LifecycleScope.TEST);
dirs[i] = createTempDir();
Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME));
for (int j = 0; j < numLegacy; j++) {
XContentType type = format.format();
@ -428,7 +429,7 @@ public class MetaDataStateFormatTest extends ElasticsearchTestCase {
@Override
protected Directory newDirectory(Path dir) throws IOException {
MockDirectoryWrapper mock = new MockDirectoryWrapper(getRandom(), super.newDirectory(dir));
closeAfterSuite(new CloseableDirectory(mock, suiteFailureMarker));
closeAfterSuite(mock);
return mock;
}
}

View File

@ -23,6 +23,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
@ -113,7 +114,7 @@ public class MetaStateServiceTests extends ElasticsearchTestCase {
private Settings randomSettings() {
ImmutableSettings.Builder builder = ImmutableSettings.builder();
if (randomBoolean()) {
builder.put(MetaStateService.FORMAT_SETTING, randomXContentType().shortName());
builder.put(MetaStateService.FORMAT_SETTING, randomFrom(XContentType.values()).shortName());
}
return builder.build();
}

View File

@ -20,6 +20,8 @@
package org.elasticsearch.gateway;
import com.google.common.collect.ImmutableSet;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -39,6 +41,7 @@ import static org.hamcrest.Matchers.hasItem;
*
*/
@ClusterScope(scope = Scope.TEST, numDataNodes = 0)
@Slow
public class RecoverAfterNodesTests extends ElasticsearchIntegrationTest {
private final static TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10);

View File

@ -438,11 +438,11 @@ public class RecoveryFromGatewayTests extends ElasticsearchIntegrationTest {
public void testRecoveryDifferentNodeOrderStartup() throws Exception {
// we need different data paths so we make sure we start the second node fresh
final String node_1 = internalCluster().startNode(settingsBuilder().put("path.data", newTempDirPath()).build());
final String node_1 = internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build());
client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet();
internalCluster().startNode(settingsBuilder().put("path.data", newTempDirPath()).build());
internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build());
ensureGreen();

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.http.netty;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.ImmutableSettings;
@@ -210,7 +211,12 @@ public class NettyHttpServerPipeliningTest extends ElasticsearchTestCase {
final int timeout = request.getUri().startsWith("/slow") && decoder.getParameters().containsKey("sleep") ? Integer.valueOf(decoder.getParameters().get("sleep").get(0)) : 0;
if (timeout > 0) {
sleep(timeout);
try {
Thread.sleep(timeout);
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
throw new RuntimeException();
}
}
if (oue != null) {
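
The replacement above inlines the old `sleep(...)` helper as a plain `Thread.sleep(...)` with explicit `InterruptedException` handling: the interrupt flag is restored and the test fails fast instead of swallowing the interrupt. A standalone sketch of that idiom (names and the exception message are illustrative):

    public final class InterruptibleSleep {

        private InterruptibleSleep() {}

        // Sleeps for the given number of milliseconds; if the thread is
        // interrupted, the interrupt status is restored so callers further up
        // the stack can still observe it, and the caller fails fast.
        public static void sleepQuietly(long millis) {
            if (millis <= 0) {
                return;
            }
            try {
                Thread.sleep(millis);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException("interrupted while sleeping", e);
            }
        }
    }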

View File

@@ -66,7 +66,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
.build();
internalCluster().startNodesAsync(3, nodeSettings).get();
final Path dataPath = newTempDirPath();
final Path dataPath = createTempDir();
Settings idxSettings = ImmutableSettings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
@@ -80,7 +80,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
assertAcked(client().admin().cluster().preparePutRepository("test-repo")
.setType("fs").setSettings(ImmutableSettings.settingsBuilder()
.put("location", newTempDirPath())));
.put("location", createTempDir())));
CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("foo").get();
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0));
assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards()));
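
Throughout this file the `path.data` setting and the snapshot repository `location` now point at `createTempDir()`, so each run works against fresh, framework-managed directories. A hedged sketch of registering an `fs` repository against such a directory inside an integration test (the repository name is made up):

    import java.nio.file.Path;

    import org.elasticsearch.common.settings.ImmutableSettings;
    import org.elasticsearch.test.ElasticsearchIntegrationTest;
    import org.junit.Test;

    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

    public class TempRepoSketchTests extends ElasticsearchIntegrationTest {

        @Test
        public void testRegisterFsRepositoryInTempDir() {
            // The temp dir is created and cleaned up by the test framework,
            // so snapshots never leak onto a shared filesystem location.
            Path repoPath = createTempDir();
            assertAcked(client().admin().cluster().preparePutRepository("sketch-repo")
                    .setType("fs")
                    .setSettings(ImmutableSettings.settingsBuilder()
                            .put("location", repoPath)));
        }
    }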
@@ -126,7 +126,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
internalCluster().startNodesAsync(3, nodeSettings).get();
final String IDX = "test";
final Path dataPath = newTempDirPath();
final Path dataPath = createTempDir();
Settings idxSettings = ImmutableSettings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
@@ -191,7 +191,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
.build();
String node1 = internalCluster().startNode(nodeSettings);
Path dataPath = newTempDirPath();
Path dataPath = createTempDir();
String IDX = "test";
Settings idxSettings = ImmutableSettings.builder()
@@ -253,7 +253,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
.build();
String node1 = internalCluster().startNode(nodeSettings);
Path dataPath = newTempDirPath();
Path dataPath = createTempDir();
String IDX = "test";
Settings idxSettings = ImmutableSettings.builder()
@@ -318,7 +318,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
int nodeCount = randomIntBetween(2, 5);
internalCluster().startNodesAsync(nodeCount, nodeSettings).get();
Path dataPath = newTempDirPath();
Path dataPath = createTempDir();
String IDX = "test";
Settings idxSettings = ImmutableSettings.builder()
@@ -362,7 +362,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
.build();
internalCluster().startNodesAsync(2, nodeSettings).get();
Path dataPath = newTempDirPath();
Path dataPath = createTempDir();
String IDX = "test";
Settings idxSettings = ImmutableSettings.builder()
@@ -419,7 +419,7 @@ public class IndexWithShadowReplicasTests extends ElasticsearchIntegrationTest {
.build();
internalCluster().startNodesAsync(3, nodeSettings).get();
Path dataPath = newTempDirPath();
Path dataPath = createTempDir();
String IDX = "test";
Settings idxSettings = ImmutableSettings.builder()

View File

@@ -212,7 +212,7 @@ public class AnalysisModuleTests extends ElasticsearchTestCase {
}
private Path generateWordList(String[] words) throws Exception {
Path wordListFile = newTempDirPath().resolve("wordlist.txt");
Path wordListFile = createTempDir().resolve("wordlist.txt");
try (BufferedWriter writer = Files.newBufferedWriter(wordListFile, StandardCharsets.UTF_8)) {
for (String word : words) {
writer.write(word);

View File

@@ -33,7 +33,7 @@ public class HunspellTokenFilterFactoryTests extends ElasticsearchTestCase {
@Test
public void testDedup() throws IOException {
Settings settings = settingsBuilder()
.put("path.conf", getResourcePath("/indices/analyze/conf_dir"))
.put("path.conf", getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.locale", "en_US")
.build();
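
`getResourcePath(...)` was the old Elasticsearch helper; `getDataPath(...)` comes from the Lucene test framework and resolves a test resource from the classpath into a `java.nio.file.Path`, which also plays nicely with the mock filesystems this change enables. A minimal sketch, reusing the resource directory referenced above and assuming it is on the test classpath:

    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.elasticsearch.test.ElasticsearchTestCase;
    import org.junit.Test;

    public class DataPathSketchTests extends ElasticsearchTestCase {

        @Test
        public void testResolveTestResource() throws Exception {
            // getDataPath(...) turns a classpath resource into a Path usable
            // with java.nio.file APIs.
            Path confDir = getDataPath("/indices/analyze/conf_dir");
            assertTrue(Files.exists(confDir));
        }
    }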
@@ -45,7 +45,7 @@ public class HunspellTokenFilterFactoryTests extends ElasticsearchTestCase {
assertThat(hunspellTokenFilter.dedup(), is(true));
settings = settingsBuilder()
.put("path.conf", getResourcePath("/indices/analyze/conf_dir"))
.put("path.conf", getDataPath("/indices/analyze/conf_dir"))
.put("index.analysis.filter.en_US.type", "hunspell")
.put("index.analysis.filter.en_US.dedup", false)
.put("index.analysis.filter.en_US.locale", "en_US")

View File

@@ -37,6 +37,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.hamcrest.Matchers.*;
/**
@@ -86,7 +87,7 @@ public class PreBuiltAnalyzerTests extends ElasticsearchSingleNodeTest {
assertThat(list, contains("dude"));
}
ts.close();
version = randomVersion();
version = randomVersion(random());
}
}
@@ -121,7 +122,7 @@ public class PreBuiltAnalyzerTests extends ElasticsearchSingleNodeTest {
assertThat(list, contains("dude"));
}
ts.close();
version = randomVersion();
version = randomVersion(random());
}
}
@@ -152,7 +153,7 @@ public class PreBuiltAnalyzerTests extends ElasticsearchSingleNodeTest {
PreBuiltAnalyzers randomPreBuiltAnalyzer = PreBuiltAnalyzers.values()[randomInt];
String analyzerName = randomPreBuiltAnalyzer.name().toLowerCase(Locale.ROOT);
Version randomVersion = randomVersion();
Version randomVersion = randomVersion(random());
Settings indexSettings = ImmutableSettings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, randomVersion).build();
NamedAnalyzer namedAnalyzer = new PreBuiltAnalyzerProvider(analyzerName, AnalyzerScope.INDEX, randomPreBuiltAnalyzer.getAnalyzer(randomVersion)).get();
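
The no-arg `randomVersion()` used an implicit random source; passing `random()` explicitly (and, in the next file, calling it on `VersionUtils`) ties the picked version to the suite's seed. A small sketch of the explicit form (class and test names are illustrative):

    import java.util.Random;

    import org.elasticsearch.Version;
    import org.elasticsearch.test.ElasticsearchTestCase;
    import org.elasticsearch.test.VersionUtils;
    import org.junit.Test;

    public class RandomVersionSketchTests extends ElasticsearchTestCase {

        @Test
        public void testRandomVersionIsSeedDriven() {
            // random() is the per-test Random provided by the randomized
            // runner; passing it explicitly keeps the choice reproducible.
            Random random = random();
            Version version = VersionUtils.randomVersion(random);
            assertNotNull(version);
        }
    }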

View File

@@ -26,8 +26,8 @@ import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.ElasticsearchTokenStreamTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.Test;
import java.io.IOException;
@@ -47,7 +47,7 @@ public class StemmerTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
Version v = ElasticsearchTestCase.randomVersion(random());
Version v = VersionUtils.randomVersion(random());
Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.my_english.type", "stemmer")
.put("index.analysis.filter.my_english.language", "english")
@@ -80,7 +80,7 @@ public class StemmerTokenFilterFactoryTests extends ElasticsearchTokenStreamTest
int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
Version v = ElasticsearchTestCase.randomVersion(random());
Version v = VersionUtils.randomVersion(random());
Settings settings = ImmutableSettings.settingsBuilder()
.put("index.analysis.filter.my_porter2.type", "stemmer")
.put("index.analysis.filter.my_porter2.language", "porter2")

View File

@@ -36,23 +36,17 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ElasticsearchSingleNodeLuceneTestCase;
import org.junit.Before;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
import static org.hamcrest.Matchers.instanceOf;
public class CodecTests extends ElasticsearchSingleNodeLuceneTestCase {
@Override
@Before
public void setUp() throws Exception {
super.setUp();
forceDefaultCodec(); // we test against default codec so never get a random one here!
}
@SuppressCodecs("*") // we test against default codec so never get a random one here!
public class CodecTests extends ElasticsearchSingleNodeTest {
@Test
public void testResolveDefaultCodecs() throws Exception {
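
Instead of overriding `setUp()` to force the default codec, the class is now annotated with Lucene's `@SuppressCodecs("*")`, which tells the test framework not to randomize the codec for this suite at all. A hedged sketch of the annotation-based form (class and test names are illustrative):

    import org.apache.lucene.codecs.Codec;
    import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
    import org.elasticsearch.test.ElasticsearchSingleNodeTest;
    import org.junit.Test;

    // "*" tells the framework not to randomize the codec at all for this suite;
    // a list of specific codec names could be given instead to exclude only those.
    @SuppressCodecs("*")
    public class DefaultCodecSketchTests extends ElasticsearchSingleNodeTest {

        @Test
        public void testSuiteRunsAgainstTheDefaultCodec() {
            // With codec randomization suppressed, tests here see the stock
            // default codec rather than a randomly chosen one.
            assertNotNull(Codec.getDefault());
        }
    }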

View File

@@ -41,7 +41,7 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -89,7 +89,7 @@ import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogSizeMatcher;
import org.elasticsearch.index.translog.fs.FsTranslog;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.MatcherAssert;
import org.junit.After;
@@ -102,21 +102,17 @@ import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.elasticsearch.test.ElasticsearchTestCase.assertBusy;
import static org.elasticsearch.test.ElasticsearchTestCase.terminate;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@LuceneTestCase.SuppressFileSystems("*") // mock FS causes translog issues recovering sometimes because of their use of globs, see LUCENE-6424
public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// TODO: this guy isn't ready for mock filesystems yet
@SuppressFileSystems("*")
public class InternalEngineTests extends ElasticsearchTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1);
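
`@SuppressFileSystems("*")` is the Lucene test framework's opt-out from the mock filesystems this pull request wires in; the TODO above notes this suite is not ready for them yet. An illustrative sketch of the annotation (class and test names are made up):

    import java.nio.file.Files;

    import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
    import org.elasticsearch.test.ElasticsearchTestCase;
    import org.junit.Test;

    // "*" disables every mock filesystem for this suite; individual mock
    // filesystem names can be listed instead of the wildcard when only some
    // of them cause trouble.
    @SuppressFileSystems("*")
    public class RealFilesystemOnlySketchTests extends ElasticsearchTestCase {

        @Test
        public void testRunsOnTheRealFilesystem() throws Exception {
            // With the suppression in place, temp dirs are plain directories on
            // the default filesystem rather than wrapped mock paths.
            assertTrue(Files.isDirectory(createTempDir()));
        }
    }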
@@ -1409,6 +1405,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
@Test
public void testIndexWriterInfoStream() {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender();
Logger rootLogger = Logger.getRootLogger();
@@ -1437,6 +1434,7 @@ public class InternalEngineTests extends ElasticsearchLuceneTestCase {
// #8603: make sure we can separately log IFD's messages
public void testIndexWriterIFDInfoStream() {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender();
// Works when running this test inside Intellij:

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.engine;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
@@ -31,6 +30,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Nullable;
@@ -63,7 +63,7 @@ import org.elasticsearch.index.store.distributor.LeastUsedDistributor;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.fs.FsTranslog;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.MatcherAssert;
import org.junit.After;
@@ -75,12 +75,7 @@ import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomIntBetween;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.test.ElasticsearchTestCase.newTempDirPath;
import static org.elasticsearch.test.ElasticsearchTestCase.terminate;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
@@ -90,7 +85,7 @@ import static org.hamcrest.Matchers.nullValue;
/**
* TODO: document me!
*/
public class ShadowEngineTests extends ElasticsearchLuceneTestCase {
public class ShadowEngineTests extends ElasticsearchTestCase {
protected final ShardId shardId = new ShardId(new Index("index"), 1);

View File

@@ -25,7 +25,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.*;
import org.apache.lucene.search.Filter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.ContentPath;
@@ -41,9 +40,6 @@ import org.junit.Before;
import static org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
// we might wanna cut this over to LuceneTestCase
@SuppressCodecs({"Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45", "Lucene46"})
// avoid codecs that do not support SortedNumerics, SortedSet, etc
public abstract class AbstractFieldDataTests extends ElasticsearchSingleNodeTest {
protected IndexService indexService;

View File

@@ -24,11 +24,9 @@ import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.test.ElasticsearchTestCase;
@SuppressCodecs({ "Lucene3x", "Lucene40", "Lucene41", "Lucene42" }) // these codecs dont support missing values
public class ReplaceMissingTests extends ElasticsearchLuceneTestCase {
public class ReplaceMissingTests extends ElasticsearchTestCase {
public void test() throws Exception {
Directory dir = newDirectory();

View File

@@ -26,8 +26,7 @@ import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.junit.BeforeClass;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.Test;
import java.util.Locale;
@@ -38,12 +37,7 @@ import static org.hamcrest.core.IsNull.notNullValue;
/**
*/
public class ParentChildFilteredTermsEnumTests extends ElasticsearchLuceneTestCase {
@BeforeClass
public static void before() {
forceDefaultCodec();
}
public class ParentChildFilteredTermsEnumTests extends ElasticsearchTestCase {
@Test
public void testSimple_twoFieldEachUniqueValue() throws Exception {

View File

@@ -45,7 +45,7 @@ public class FileBasedMappingsTests extends ElasticsearchTestCase {
private static final String NAME = FileBasedMappingsTests.class.getSimpleName();
public void testFileBasedMappings() throws Exception {
Path configDir = newTempDirPath();
Path configDir = createTempDir();
Path mappingsDir = configDir.resolve("mappings");
Path indexMappings = mappingsDir.resolve("index").resolve("type.json");
Path defaultMappings = mappingsDir.resolve("_default").resolve("type.json");
@@ -82,7 +82,7 @@ public class FileBasedMappingsTests extends ElasticsearchTestCase {
Settings settings = ImmutableSettings.builder()
.put(ClusterName.SETTING, NAME)
.put("node.name", NAME)
.put("path.home", newTempDirPath())
.put("path.home", createTempDir())
.put("path.conf", configDir.toAbsolutePath())
.put("http.enabled", false)
.build();
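
Same pattern here: `path.home` (and, in this test, `path.conf`) resolve under framework-managed temp directories so the test never writes outside its sandbox. A hedged sketch of assembling such settings (the node name value is illustrative):

    import java.nio.file.Path;

    import org.elasticsearch.common.settings.ImmutableSettings;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.test.ElasticsearchTestCase;
    import org.junit.Test;

    public class NodeSettingsSketchTests extends ElasticsearchTestCase {

        @Test
        public void testBuildNodeSettingsWithTempPaths() {
            // Both the home dir and the config dir live under temp directories
            // owned by the test framework, so nothing is written into the repo.
            Path configDir = createTempDir();
            Settings settings = ImmutableSettings.builder()
                    .put("node.name", "sketch-node")
                    .put("path.home", createTempDir())
                    .put("path.conf", configDir.toAbsolutePath())
                    .put("http.enabled", false)
                    .build();
            assertEquals("sketch-node", settings.get("node.name"));
        }
    }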

Some files were not shown because too many files have changed in this diff.