[Rename] Fix issues for gradle precommit task. (#418)
Fix miscellaneous issues identified during `gradle precommit`. These issues are side effects of the rename-to-OpenSearch work.

Signed-off-by: Rabi Panda <adnapibar@gmail.com>
parent d82f838423
commit eddfe6760d
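Most of the changes below fall into two groups: long lines re-wrapped to satisfy the precommit line-length check, and leftover identifiers fixed up after the rename (including un-renaming third-party packages such as org.elasticsearch.mocksocket, which the bulk rename had rewritten by mistake). As a minimal sketch of the recurring wrapping pattern (the JDK's HttpRequest builder stands in for the OpenSearch builders here; the project's exact line-length limit comes from its build configuration and is an assumption, not shown in this commit):

import java.net.URI;
import java.net.http.HttpRequest;
import java.time.Duration;

public class WrapExample {
    public static void main(String[] args) {
        // Before: the whole chain on one line, the style that trips the precommit length check:
        // HttpRequest r = HttpRequest.newBuilder().uri(URI.create("https://example.org")).timeout(Duration.ofSeconds(5)).GET().build();

        // After: one builder call per line, the re-wrapped style applied throughout this commit.
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("https://example.org"))
            .timeout(Duration.ofSeconds(5))
            .GET()
            .build();
        System.out.println(request.uri());
    }
}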
@@ -117,8 +117,10 @@ public class RoundingBenchmark {
         return timeUnitRoundingUtcDayOfMonthJoda.round(timestamp);
     }

-    private final org.opensearch.common.rounding.Rounding timeUnitRoundingUtcQuarterOfYearJoda =
-        org.opensearch.common.rounding.Rounding.builder(DateTimeUnit.QUARTER).timeZone(DateTimeZone.UTC).build();
+    private final org.opensearch.common.rounding.Rounding timeUnitRoundingUtcQuarterOfYearJoda = org.opensearch.common.rounding.Rounding
+        .builder(DateTimeUnit.QUARTER)
+        .timeZone(DateTimeZone.UTC)
+        .build();
     private final Rounding timeUnitRoundingUtcQuarterOfYearJava = Rounding.builder(QUARTER_OF_YEAR).timeZone(ZoneOffset.UTC).build();

     @Benchmark

@@ -147,8 +149,10 @@ public class RoundingBenchmark {
         return timeUnitRoundingUtcMonthOfYearJoda.round(timestamp);
     }

-    private final org.opensearch.common.rounding.Rounding timeUnitRoundingUtcYearOfCenturyJoda =
-        org.opensearch.common.rounding.Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(DateTimeZone.UTC).build();
+    private final org.opensearch.common.rounding.Rounding timeUnitRoundingUtcYearOfCenturyJoda = org.opensearch.common.rounding.Rounding
+        .builder(DateTimeUnit.YEAR_OF_CENTURY)
+        .timeZone(DateTimeZone.UTC)
+        .build();
     private final Rounding timeUnitRoundingUtcYearOfCenturyJava = Rounding.builder(YEAR_OF_CENTURY).timeZone(ZoneOffset.UTC).build();

     @Benchmark
@@ -19,7 +19,6 @@

 package org.opensearch.common;

-import org.opensearch.common.Rounding;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
@@ -115,9 +115,9 @@ class PluginBuildPlugin implements Plugin<Project> {
                 baseClass 'org.apache.lucene.util.LuceneTestCase'
             }
             IT {
-                baseClass 'org.opensearch.test.ESIntegTestCase'
-                baseClass 'org.opensearch.test.rest.ESRestTestCase'
-                baseClass 'org.opensearch.test.ESSingleNodeTestCase'
+                baseClass 'org.opensearch.test.OpenSearchIntegTestCase'
+                baseClass 'org.opensearch.test.rest.OpenSearchRestTestCase'
+                baseClass 'org.opensearch.test.OpenSearchSingleNodeTestCase'
             }
         }
     }
@@ -64,8 +64,7 @@ public class TestWithSslPlugin implements Plugin<Project> {
         File keystoreDir = new File(project.getBuildDir(), "keystore/test/ssl");
         File nodeKeystore = new File(keystoreDir, "test-node.jks");
         File clientKeyStore = new File(keystoreDir, "test-client.jks");
-        NamedDomainObjectContainer<OpenSearchCluster> clusters = (NamedDomainObjectContainer<OpenSearchCluster>) project
-            .getExtensions()
+        NamedDomainObjectContainer<OpenSearchCluster> clusters = (NamedDomainObjectContainer<OpenSearchCluster>) project.getExtensions()
             .getByName(TestClustersPlugin.EXTENSION_NAME);
         clusters.all(c -> {
             // copy keystores & certs into config/
@@ -48,7 +48,7 @@ public class LoggerUsageTask extends PrecommitTask {
     @TaskAction
     public void runLoggerUsageTask() {
         LoggedExec.javaexec(getProject(), spec -> {
-            spec.setMain("org.opensearch.test.loggerusage.ESLoggerUsageChecker");
+            spec.setMain("org.opensearch.test.loggerusage.OpenSearchLoggerUsageChecker");
             spec.classpath(getClasspath());
             getClassDirectories().forEach(spec::args);
         });
@@ -32,8 +32,8 @@ public class TestingConventionsPrecommitPlugin extends PrecommitPlugin {
             TestingConventionRule testsRule = t.getNaming().maybeCreate("Tests");
             testsRule.baseClass("org.apache.lucene.util.LuceneTestCase");
             TestingConventionRule itRule = t.getNaming().maybeCreate("IT");
-            itRule.baseClass("org.opensearch.test.ESIntegTestCase");
-            itRule.baseClass("org.opensearch.test.rest.ESRestTestCase");
+            itRule.baseClass("org.opensearch.test.OpenSearchIntegTestCase");
+            itRule.baseClass("org.opensearch.test.rest.OpenSearchRestTestCase");
         });
         return testingConventions;
     }
@@ -124,7 +124,8 @@ public class DistroTestPlugin implements Plugin<Project> {
             System.out.println(ex.getMessage());
         }

-        if ((distribution.getType() == OpenSearchDistribution.Type.DEB || distribution.getType() == OpenSearchDistribution.Type.RPM) && distribution.getBundledJdk()) {
+        if ((distribution.getType() == OpenSearchDistribution.Type.DEB || distribution.getType() == OpenSearchDistribution.Type.RPM)
+            && distribution.getBundledJdk()) {
             for (Version version : BuildParams.getBwcVersions().getIndexCompatible()) {
                 if (version.before("6.3.0")) {
                     continue; // before opening xpack

@@ -204,7 +205,8 @@ public class DistroTestPlugin implements Plugin<Project> {
             // auto-detection doesn't work.
             //
             // The shouldTestDocker property could be null, hence we use Boolean.TRUE.equals()
-            boolean shouldExecute = (type != OpenSearchDistribution.Type.DOCKER) || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));
+            boolean shouldExecute = (type != OpenSearchDistribution.Type.DOCKER)
+                || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker"));

             if (shouldExecute) {
                 distroTest.configure(t -> t.dependsOn(wrapperTask));

@@ -359,7 +361,11 @@ public class DistroTestPlugin implements Plugin<Project> {
         List<OpenSearchDistribution> currentDistros = new ArrayList<>();

         for (Architecture architecture : Architecture.values()) {
-            for (OpenSearchDistribution.Type type : Arrays.asList(OpenSearchDistribution.Type.DEB, OpenSearchDistribution.Type.RPM, OpenSearchDistribution.Type.DOCKER)) {
+            for (OpenSearchDistribution.Type type : Arrays.asList(
+                OpenSearchDistribution.Type.DEB,
+                OpenSearchDistribution.Type.RPM,
+                OpenSearchDistribution.Type.DOCKER
+            )) {
                 for (boolean bundledJdk : Arrays.asList(true, false)) {
                     if (bundledJdk == false) {
                         // We'll never publish an ARM (aarch64) build without a bundled JDK.

@@ -380,7 +386,10 @@ public class DistroTestPlugin implements Plugin<Project> {
         }

         for (Architecture architecture : Architecture.values()) {
-            for (OpenSearchDistribution.Platform platform : Arrays.asList(OpenSearchDistribution.Platform.LINUX, OpenSearchDistribution.Platform.WINDOWS)) {
+            for (OpenSearchDistribution.Platform platform : Arrays.asList(
+                OpenSearchDistribution.Platform.LINUX,
+                OpenSearchDistribution.Platform.WINDOWS
+            )) {
                 for (boolean bundledJdk : Arrays.asList(true, false)) {
                     if (bundledJdk == false && architecture != Architecture.X64) {
                         // We will never publish distributions for non-x86 (amd64) platforms

@@ -389,7 +398,14 @@ public class DistroTestPlugin implements Plugin<Project> {
                 }

                 currentDistros.add(
-                    createDistro(distributions, architecture, OpenSearchDistribution.Type.ARCHIVE, platform, bundledJdk, VersionProperties.getOpenSearch())
+                    createDistro(
+                        distributions,
+                        architecture,
+                        OpenSearchDistribution.Type.ARCHIVE,
+                        platform,
+                        bundledJdk,
+                        VersionProperties.getOpenSearch()
+                    )
                 );
             }
         }

@@ -434,10 +450,14 @@ public class DistroTestPlugin implements Plugin<Project> {
         return project.getName().contains("windows");
     }

-    private static String distroId(OpenSearchDistribution.Type type, OpenSearchDistribution.Platform platform, boolean bundledJdk, Architecture architecture) {
-        return (type == OpenSearchDistribution.Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk") + (architecture == Architecture.X64
-            ? ""
-            : "-" + architecture.toString().toLowerCase());
+    private static String distroId(
+        OpenSearchDistribution.Type type,
+        OpenSearchDistribution.Platform platform,
+        boolean bundledJdk,
+        Architecture architecture
+    ) {
+        return (type == OpenSearchDistribution.Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk")
+            + (architecture == Architecture.X64 ? "" : "-" + architecture.toString().toLowerCase());
     }

     private static String destructiveDistroTestTaskName(OpenSearchDistribution distro) {
@@ -76,7 +76,9 @@ public class GradleDistroTestTask extends VagrantShellTask {
         line.append(isWindows ? "& .\\gradlew " : "./gradlew ");
         line.append(taskName);
         line.append(" --project-cache-dir ");
-        line.append(isWindows ? VagrantMachine.convertWindowsPath(getProject(), cacheDir) : VagrantMachine.convertLinuxPath(getProject(), cacheDir));
+        line.append(
+            isWindows ? VagrantMachine.convertWindowsPath(getProject(), cacheDir) : VagrantMachine.convertLinuxPath(getProject(), cacheDir)
+        );
         line.append(" -S");
         line.append(" --parallel");
         line.append(" -D'org.gradle.logging.level'=" + getProject().getGradle().getStartParameter().getLogLevel());
@@ -41,8 +41,7 @@ public class RestTestUtil {
     static OpenSearchCluster createTestCluster(Project project, SourceSet sourceSet) {
         // eagerly create the testCluster container so it is easily available for configuration
         @SuppressWarnings("unchecked")
-        NamedDomainObjectContainer<OpenSearchCluster> testClusters = (NamedDomainObjectContainer<OpenSearchCluster>) project
-            .getExtensions()
+        NamedDomainObjectContainer<OpenSearchCluster> testClusters = (NamedDomainObjectContainer<OpenSearchCluster>) project.getExtensions()
             .getByName(TestClustersPlugin.EXTENSION_NAME);
         return testClusters.create(sourceSet.getName());
     }

@@ -81,10 +80,7 @@ public class RestTestUtil {
             project.getDependencies().add(sourceSet.getImplementationConfigurationName(), project.project(":test:framework"));
         } else {
             project.getDependencies()
-                .add(
-                    sourceSet.getImplementationConfigurationName(),
-                    "org.opensearch.test:framework:" + VersionProperties.getOpenSearch()
-                );
+                .add(sourceSet.getImplementationConfigurationName(), "org.opensearch.test:framework:" + VersionProperties.getOpenSearch());
         }

     }
@@ -91,16 +91,7 @@ public class OpenSearchCluster implements TestClusterConfiguration, Named {
         this.bwcJdk = bwcJdk;

         this.nodes.add(
-            new OpenSearchNode(
-                path,
-                clusterName + "-0",
-                project,
-                reaper,
-                fileSystemOperations,
-                archiveOperations,
-                workingDirBase,
-                bwcJdk
-            )
+            new OpenSearchNode(path, clusterName + "-0", project, reaper, fileSystemOperations, archiveOperations, workingDirBase, bwcJdk)
         );
         // configure the cluster name eagerly so nodes know about it
         this.nodes.all((node) -> node.defaultConfig.put("cluster.name", safeName(clusterName)));
@@ -89,7 +89,10 @@ public class StandaloneRestIntegTestTask extends Test implements TestClustersAwa
     public List<ResourceLock> getSharedResources() {
         List<ResourceLock> locks = new ArrayList<>(super.getSharedResources());
         BuildServiceRegistryInternal serviceRegistry = getServices().get(BuildServiceRegistryInternal.class);
-        Provider<TestClustersThrottle> throttleProvider = GradleUtils.getBuildService(serviceRegistry, TestClustersPlugin.THROTTLE_SERVICE_NAME);
+        Provider<TestClustersThrottle> throttleProvider = GradleUtils.getBuildService(
+            serviceRegistry,
+            TestClustersPlugin.THROTTLE_SERVICE_NAME
+        );
         SharedResource resource = serviceRegistry.forService(throttleProvider);

         int nodeCount = clusters.stream().mapToInt(cluster -> cluster.getNodes().size()).sum();
@@ -101,14 +101,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
     }

     public void testBundledJdkDefault() {
-        OpenSearchDistribution distro = checkDistro(
-            createProject(null, false),
-            "testdistro",
-            "5.0.0",
-            Type.ARCHIVE,
-            Platform.LINUX,
-            true
-        );
+        OpenSearchDistribution distro = checkDistro(createProject(null, false), "testdistro", "5.0.0", Type.ARCHIVE, Platform.LINUX, true);
         assertTrue(distro.getBundledJdk());
     }

@@ -18,7 +18,6 @@
  */
 package org.opensearch.gradle.doc;

-import org.opensearch.gradle.doc.RestTestsFromSnippetsTask;
 import org.opensearch.gradle.test.GradleUnitTestCase;
 import org.gradle.api.InvalidUserDataException;
 import org.junit.Rule;
@@ -1061,7 +1061,8 @@ public class IndicesRequestConvertersTests extends OpenSearchTestCase {
             putTemplateRequest.version(OpenSearchTestCase.randomInt());
         }
         if (OpenSearchTestCase.randomBoolean()) {
-            putTemplateRequest.settings(Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()));
+            putTemplateRequest.settings(
+                Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()));
         }
         Map<String, String> expectedParams = new HashMap<>();
         if (OpenSearchTestCase.randomBoolean()) {

@@ -1106,7 +1107,8 @@ public class IndicesRequestConvertersTests extends OpenSearchTestCase {
             putTemplateRequest.version(OpenSearchTestCase.randomInt());
         }
         if (OpenSearchTestCase.randomBoolean()) {
-            putTemplateRequest.settings(Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()));
+            putTemplateRequest.settings(
+                Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()));
         }
         Map<String, String> expectedParams = new HashMap<>();
         if (OpenSearchTestCase.randomBoolean()) {
@@ -79,7 +79,9 @@ final class JvmOptionsParser {
     */
    public static void main(final String[] args) throws InterruptedException, IOException {
        if (args.length != 1) {
-            throw new IllegalArgumentException("expected one argument specifying path to OPENSEARCH_PATH_CONF but was " + Arrays.toString(args));
+            throw new IllegalArgumentException(
+                "expected one argument specifying path to OPENSEARCH_PATH_CONF but was " + Arrays.toString(args)
+            );
        }

        final JvmOptionsParser parser = new JvmOptionsParser();
@@ -65,7 +65,7 @@ class ListPluginsCommand extends EnvironmentAwareCommand {
         terminal.println(Terminal.Verbosity.SILENT, prefix + plugin.getFileName().toString());
         PluginInfo info = PluginInfo.readFromProperties(env.pluginsFile().resolve(plugin));
         terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix));
-        if (info.getElasticsearchVersion().equals(Version.CURRENT) == false) {
+        if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) {
             terminal.errorPrintln(
                 "WARNING: plugin ["
                     + info.getName()
@@ -1023,10 +1023,7 @@ public class InstallPluginCommandTests extends OpenSearchTestCase {
             () -> assertInstallPluginFromUrl("analysis-icu", "analysis-icu", url, null, true)
         );
         assertThat(e.exitCode, equalTo(ExitCodes.CONFIG));
-        assertThat(
-            e,
-            hasToString(containsString("attempted to install release build of official plugin on snapshot build of OpenSearch"))
-        );
+        assertThat(e, hasToString(containsString("attempted to install release build of official plugin on snapshot build of OpenSearch")));
     }

     @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/100")
@@ -357,9 +357,9 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
     public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
         List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
         // TODO remove in 8.0
-        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH,
+        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.OPENSEARCH,
             () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
-        analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH,
+        analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.OPENSEARCH,
             () -> new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true,
                 CharArraySet.EMPTY_SET)));
         analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE,
@@ -66,7 +66,15 @@ public class OpenSearchDashboardsPlugin extends Plugin implements SystemIndexPlu

     public static final Setting<List<String>> OPENSEARCH_DASHBOARDS_INDEX_NAMES_SETTING = Setting.listSetting(
         "opensearch_dashboards.system_indices",
-        unmodifiableList(Arrays.asList(".opensearch_dashboards", ".opensearch_dashboards_*", ".reporting-*", ".apm-agent-configuration", ".apm-custom-link")),
+        unmodifiableList(
+            Arrays.asList(
+                ".opensearch_dashboards",
+                ".opensearch_dashboards_*",
+                ".reporting-*",
+                ".apm-agent-configuration",
+                ".apm-custom-link"
+            )
+        ),
         Function.identity(),
         Property.NodeScope
     );

@@ -34,7 +34,10 @@ import static org.hamcrest.Matchers.is;
 public class OpenSearchDashboardsPluginTests extends OpenSearchTestCase {

     public void testOpenSearchDashboardsIndexNames() {
-        assertThat(new OpenSearchDashboardsPlugin().getSettings(), contains(OpenSearchDashboardsPlugin.OPENSEARCH_DASHBOARDS_INDEX_NAMES_SETTING));
+        assertThat(
+            new OpenSearchDashboardsPlugin().getSettings(),
+            contains(OpenSearchDashboardsPlugin.OPENSEARCH_DASHBOARDS_INDEX_NAMES_SETTING)
+        );
         assertThat(
             new OpenSearchDashboardsPlugin().getSystemIndexDescriptors(Settings.EMPTY)
                 .stream()
@@ -1202,7 +1202,8 @@ public class QueryAnalyzerTests extends OpenSearchTestCase {
     public void testToParentBlockJoinQuery() {
         TermQuery termQuery = new TermQuery(new Term("field", "value"));
         QueryBitSetProducer queryBitSetProducer = new QueryBitSetProducer(new TermQuery(new Term("_type", "child")));
-        OpenSearchToParentBlockJoinQuery query = new OpenSearchToParentBlockJoinQuery(termQuery, queryBitSetProducer, ScoreMode.None, "child");
+        OpenSearchToParentBlockJoinQuery query =
+            new OpenSearchToParentBlockJoinQuery(termQuery, queryBitSetProducer, ScoreMode.None, "child");
         Result result = analyze(query, Version.CURRENT);
         assertFalse(result.verified);
         assertThat(result.minimumShouldMatch, equalTo(1));
@@ -312,7 +312,8 @@ public class DiscountedCumulativeGainTests extends OpenSearchTestCase {

     public void testSerialization() throws IOException {
         DiscountedCumulativeGain original = createTestItem();
-        DiscountedCumulativeGain deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()),
+        DiscountedCumulativeGain deserialized =
+            OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()),
                 DiscountedCumulativeGain::new);
         assertEquals(deserialized, original);
         assertEquals(deserialized.hashCode(), original.hashCode());

@@ -190,7 +190,8 @@ public class ExpectedReciprocalRankTests extends OpenSearchTestCase {

     public void testSerialization() throws IOException {
         ExpectedReciprocalRank original = createTestItem();
-        ExpectedReciprocalRank deserialized = OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()),
+        ExpectedReciprocalRank deserialized =
+            OpenSearchTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()),
                 ExpectedReciprocalRank::new);
         assertEquals(deserialized, original);
         assertEquals(deserialized.hashCode(), original.hashCode());
@@ -26,7 +26,7 @@ import org.apache.http.util.EntityUtils;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
-import org.opensearch.OpenSearchException;;
+import org.opensearch.OpenSearchException;
 import org.opensearch.OpenSearchStatusException;
 import org.opensearch.Version;
 import org.opensearch.action.bulk.BackoffPolicy;
@@ -902,7 +902,8 @@ public class AsyncBulkByScrollActionTests extends OpenSearchTestCase {
             }
             if (i == toReject) {
                 responses[i] = new BulkItemResponse(i, item.opType(),
-                    new Failure(response.getIndex(), response.getType(), response.getId(), new OpenSearchRejectedExecutionException()));
+                    new Failure(response.getIndex(), response.getType(), response.getId(),
+                        new OpenSearchRejectedExecutionException()));
             } else {
                 responses[i] = new BulkItemResponse(i, item.opType(), response);
             }
@@ -47,7 +47,8 @@ import static org.hamcrest.core.Is.is;
 // These tests are here today so they have access to a proper REST client. They cannot be in :server:integTest since the REST client needs a
 // proper transport implementation, and they cannot be REST tests today since they need to restart nodes. When #35599 and friends land we
 // should be able to move these tests to run against a proper cluster instead. TODO do this.
-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoManageMasterNodes = false)
+@OpenSearchIntegTestCase.ClusterScope(
+    scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0, autoManageMasterNodes = false)
 public class Zen2RestApiIT extends OpenSearchNetty4IntegTestCase {

     @Override
@@ -71,7 +71,8 @@ public class OpenSearchLoggingHandlerIT extends OpenSearchNetty4IntegTestCase {
             "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern);

         final MockLogAppender.LoggingExpectation flushExpectation =
-            new MockLogAppender.SeenEventExpectation("flush", OpenSearchLoggingHandler.class.getCanonicalName(), Level.TRACE, "*FLUSH*");
+            new MockLogAppender.SeenEventExpectation(
+                "flush", OpenSearchLoggingHandler.class.getCanonicalName(), Level.TRACE, "*FLUSH*");

         final String readPattern =
             ".*\\[length: \\d+" +
@@ -33,7 +33,7 @@ import org.opensearch.common.settings.Setting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.discovery.DiscoveryModule;
 import org.opensearch.env.Environment;
-import org.opensearch.mocksocket.MockHttpServer;
+import org.elasticsearch.mocksocket.MockHttpServer;
 import org.opensearch.node.Node;
 import org.opensearch.plugin.discovery.azure.classic.AzureDiscoveryPlugin;
 import org.opensearch.plugins.Plugin;

@@ -125,14 +125,16 @@ public class AzureDiscoveryClusterFormationTests extends OpenSearchIntegTestCase
             .put(AzureComputeService.Management.ENDPOINT_SETTING.getKey(), "https://" + InetAddress.getLoopbackAddress().getHostAddress() +
                 ":" + httpsServer.getAddress().getPort())
             .put(AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), keyStoreFile.toAbsolutePath())
-            .put(AzureComputeService.Discovery.HOST_TYPE_SETTING.getKey(), AzureSeedHostsProvider.HostType.PUBLIC_IP.name())
+            .put(AzureComputeService.Discovery.HOST_TYPE_SETTING.getKey(),
+                org.opensearch.discovery.azure.classic.AzureSeedHostsProvider.HostType.PUBLIC_IP.name())
             .put(AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey(), "keypass")
             .put(AzureComputeService.Management.KEYSTORE_TYPE_SETTING.getKey(), "jks")
             .put(AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), "myservice")
             .put(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), "subscription")
             .put(AzureComputeService.Discovery.DEPLOYMENT_NAME_SETTING.getKey(), "mydeployment")
             .put(AzureComputeService.Discovery.ENDPOINT_NAME_SETTING.getKey(), "myendpoint")
-            .put(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING.getKey(), AzureSeedHostsProvider.Deployment.PRODUCTION.name())
+            .put(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING.getKey(),
+                org.opensearch.discovery.azure.classic.AzureSeedHostsProvider.Deployment.PRODUCTION.name())
             .build();
     }
@@ -32,7 +32,8 @@ import org.opensearch.test.OpenSearchIntegTestCase;
 public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCase {

     public void testTwoNodesShouldRunUsingPrivateOrPublicIp() {
-        final String hostType = randomFrom(AzureSeedHostsProvider.HostType.values()).getType();
+        final String hostType =
+            randomFrom(org.opensearch.discovery.azure.classic.AzureSeedHostsProvider.HostType.values()).getType();
         logger.info("--> using azure host type " + hostType);

         final Settings settings = Settings.builder()
@@ -27,7 +27,7 @@ import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.MockSecureSettings;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.core.internal.io.IOUtils;
-import org.opensearch.mocksocket.MockHttpServer;
+import org.elasticsearch.mocksocket.MockHttpServer;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.transport.MockTransportService;
 import org.opensearch.threadpool.TestThreadPool;
@@ -25,7 +25,7 @@ import org.opensearch.common.Strings;
 import org.opensearch.common.SuppressForbidden;
 import org.opensearch.common.network.NetworkService;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.mocksocket.MockHttpServer;
+import org.elasticsearch.mocksocket.MockHttpServer;
 import org.opensearch.rest.RestStatus;
 import org.opensearch.test.OpenSearchTestCase;

@@ -48,7 +48,8 @@ public class ExampleRescoreBuilderTests extends AbstractWireSerializingTestCase<
         Supplier<ExampleRescoreBuilder> supplier = randomFrom(
             () -> new ExampleRescoreBuilder(instance.factor(), instance.factorField())
                 .windowSize(randomValueOtherThan(instance.windowSize(), () -> between(0, Integer.MAX_VALUE))),
-            () -> new ExampleRescoreBuilder(randomValueOtherThan(instance.factor(), OpenSearchTestCase::randomFloat), instance.factorField())
+            () -> new ExampleRescoreBuilder(
+                    randomValueOtherThan(instance.factor(), OpenSearchTestCase::randomFloat), instance.factorField())
                 .windowSize(instance.windowSize()),
             () -> new ExampleRescoreBuilder(
                     instance.factor(), randomValueOtherThan(instance.factorField(), () -> randomAlphaOfLength(5)))
@@ -19,7 +19,7 @@

 package org.opensearch.example.resthandler;

-import org.opensearch.mocksocket.MockSocket;
+import org.elasticsearch.mocksocket.MockSocket;
 import org.opensearch.test.OpenSearchTestCase;

 import java.io.BufferedReader;
@@ -43,7 +43,7 @@ import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.ByteSizeUnit;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.concurrent.CountDown;
-import org.opensearch.mocksocket.MockHttpServer;
+import org.elasticsearch.mocksocket.MockHttpServer;
 import org.opensearch.rest.RestStatus;
 import org.opensearch.rest.RestUtils;
 import org.opensearch.test.OpenSearchTestCase;
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.opensearch
+package org.opensearch;

 import org.opensearch.common.network.NetworkModule;
 import org.opensearch.common.settings.Settings;
@@ -29,7 +29,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.hasSize;

-public class EvilOpenSearchCliTests extends BaseOpenSearchCliTestCase {
+public class EvilOpenSearchCliTests extends OpenSearchCliTestCase {

     @SuppressForbidden(reason = "manipulates system properties for testing")
     public void testPathHome() throws Exception {
@@ -34,7 +34,7 @@ import java.security.cert.Certificate;
 import java.util.Collections;

 /**
- * Unit tests for ESPolicy: these cannot run with security manager,
+ * Unit tests for OpenSearchPolicy: these cannot run with security manager,
  * we don't allow messing with the policy
  */
 public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {

@@ -52,7 +52,8 @@ public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {
         Permission all = new AllPermission();
         PermissionCollection allCollection = all.newPermissionCollection();
         allCollection.add(all);
-        ESPolicy policy = new ESPolicy(Collections.emptyMap(), allCollection, Collections.emptyMap(), true, new Permissions());
+        OpenSearchPolicy policy =
+            new OpenSearchPolicy(Collections.emptyMap(), allCollection, Collections.emptyMap(), true, new Permissions());
         // restrict ourselves to NoPermission
         PermissionCollection noPermissions = new Permissions();
         assertFalse(policy.implies(new ProtectionDomain(null, noPermissions), new FilePermission("foo", "read")));

@@ -67,7 +68,8 @@ public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {
     public void testNullLocation() throws Exception {
         assumeTrue("test cannot run with security manager", System.getSecurityManager() == null);
         PermissionCollection noPermissions = new Permissions();
-        ESPolicy policy = new ESPolicy(Collections.emptyMap(), noPermissions, Collections.emptyMap(), true, new Permissions());
+        OpenSearchPolicy policy =
+            new OpenSearchPolicy(Collections.emptyMap(), noPermissions, Collections.emptyMap(), true, new Permissions());
         assertFalse(policy.implies(new ProtectionDomain(new CodeSource(null, (Certificate[]) null), noPermissions),
             new FilePermission("foo", "read")));
     }

@@ -75,7 +77,8 @@ public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {
     public void testListen() {
         assumeTrue("test cannot run with security manager", System.getSecurityManager() == null);
         final PermissionCollection noPermissions = new Permissions();
-        final ESPolicy policy = new ESPolicy(Collections.emptyMap(), noPermissions, Collections.emptyMap(), true, new Permissions());
+        final OpenSearchPolicy policy =
+            new OpenSearchPolicy(Collections.emptyMap(), noPermissions, Collections.emptyMap(), true, new Permissions());
         assertFalse(
             policy.implies(
                 new ProtectionDomain(OpenSearchPolicyUnitTests.class.getProtectionDomain().getCodeSource(), noPermissions),

@@ -87,7 +90,8 @@ public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {
         assumeTrue("test cannot run with security manager", System.getSecurityManager() == null);
         final PermissionCollection dataPathPermission = new Permissions();
         dataPathPermission.add(new FilePermission("/home/opensearch/data/-", "read"));
-        final ESPolicy policy = new ESPolicy(Collections.emptyMap(), new Permissions(), Collections.emptyMap(), true, dataPathPermission);
+        final OpenSearchPolicy policy =
+            new OpenSearchPolicy(Collections.emptyMap(), new Permissions(), Collections.emptyMap(), true, dataPathPermission);
         assertTrue(
             policy.implies(
                 new ProtectionDomain(new CodeSource(null, (Certificate[]) null), new Permissions()),
@@ -161,7 +161,7 @@ public class KeystoreManagementTests extends PackagingTestCase {

         assertPasswordProtectedKeystore();

-        awaitElasticsearchStartup(runOpenSearchStartCommand(password, true, false));
+        awaitOpenSearchStartup(runOpenSearchStartCommand(password, true, false));
         ServerUtils.runOpenSearchTests();
         stopOpenSearch();
     }

@@ -192,7 +192,7 @@ public class KeystoreManagementTests extends PackagingTestCase {

         assertPasswordProtectedKeystore();

-        awaitElasticsearchStartup(runOpenSearchStartCommand(password, false, true));
+        awaitOpenSearchStartup(runOpenSearchStartCommand(password, false, true));
         ServerUtils.runOpenSearchTests();
         stopOpenSearch();
     }
@@ -169,21 +169,15 @@ public class Archives {
         assertThat(es.lib, file(Directory, owner, owner, p755));
         assertThat(Files.exists(es.config("opensearch.keystore")), is(false));

-        Stream.of(
-            "opensearch",
-            "opensearch-env",
-            "opensearch-keystore",
-            "opensearch-plugin",
-            "opensearch-shard",
-            "opensearch-node"
-        ).forEach(executable -> {
+        Stream.of("opensearch", "opensearch-env", "opensearch-keystore", "opensearch-plugin", "opensearch-shard", "opensearch-node")
+            .forEach(executable -> {

-            assertThat(es.bin(executable), file(File, owner, owner, p755));
+                assertThat(es.bin(executable), file(File, owner, owner, p755));

-            if (distribution.packaging == Distribution.Packaging.ZIP) {
-                assertThat(es.bin(executable + ".bat"), file(File, owner));
-            }
-        });
+                if (distribution.packaging == Distribution.Packaging.ZIP) {
+                    assertThat(es.bin(executable + ".bat"), file(File, owner));
+                }
+            });

         if (distribution.packaging == Distribution.Packaging.ZIP) {
             Stream.of("opensearch-service.bat", "opensearch-service-mgr.exe", "opensearch-service-x64.exe")

@@ -237,12 +231,7 @@ public class Archives {
         return sh.runIgnoreExitCode(script);
     }

-    public static Shell.Result runOpenSearchStartCommand(
-        Installation installation,
-        Shell sh,
-        String keystorePassword,
-        boolean daemonize
-    ) {
+    public static Shell.Result runOpenSearchStartCommand(Installation installation, Shell sh, String keystorePassword, boolean daemonize) {
         final Path pidFile = installation.home.resolve("opensearch.pid");

         assertThat(pidFile, fileDoesNotExist());
@@ -188,7 +188,7 @@ public class Installation {

     public class Executables {

-        public final Executable elasticsearch = new Executable("opensearch");
+        public final Executable opensearch = new Executable("opensearch");
         public final Executable pluginTool = new Executable("opensearch-plugin");
         public final Executable keystoreTool = new Executable("opensearch-keystore");
         public final Executable shardTool = new Executable("opensearch-shard");
@@ -236,7 +236,7 @@ public class Packages {
         return sh.runIgnoreExitCode("service opensearch start");
     }

-    public static void assertOpenSearchearchStarted(Shell sh, Installation installation) throws Exception {
+    public static void assertOpenSearchStarted(Shell sh, Installation installation) throws Exception {
         waitForOpenSearch(installation);

         if (isSystemd()) {

@@ -261,7 +261,7 @@ public class Packages {
         } else {
             sh.run("service opensearch restart");
         }
-        assertOpenSearchearchStarted(sh, installation);
+        assertOpenSearchStarted(sh, installation);
     }

     /**
@@ -47,7 +47,7 @@ testingConventions {
     naming.clear()
     naming {
         IT {
-            baseClass 'org.opensearch.smoketest.ESSmokeClientTestCase'
+            baseClass 'org.opensearch.smoketest.OpenSearchSmokeClientTestCase'
         }
     }
 }
@@ -167,8 +167,8 @@ tasks.named("testingConventions").configure {
             baseClass "org.apache.lucene.util.LuceneTestCase"
         }
         IT {
-            baseClass "org.opensearch.test.ESIntegTestCase"
-            baseClass "org.opensearch.test.ESSingleNodeTestCase"
+            baseClass "org.opensearch.test.OpenSearchIntegTestCase"
+            baseClass "org.opensearch.test.OpenSearchSingleNodeTestCase"
         }
     }
 }
@@ -81,12 +81,12 @@ public class ValidateIndicesAliasesRequestIT extends OpenSearchSingleNodeTestCas

     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
-        return Collections.singletonList(IndicesAliasopensearchplugin.class);
+        return Collections.singletonList(IndicesAliasesPlugin.class);
     }

     public void testAllowed() {
         final Settings settings = Settings.builder()
-            .putList(IndicesAliasopensearchplugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("allowed"))
+            .putList(IndicesAliasesPlugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("allowed"))
             .build();
         createIndex("index", settings);
         final IndicesAliasesRequest request = new IndicesAliasesRequest().origin("allowed");

@@ -102,7 +102,7 @@ public class ValidateIndicesAliasesRequestIT extends OpenSearchSingleNodeTestCas

     public void testNotAllowed() {
         final Settings settings = Settings.builder()
-            .putList(IndicesAliasopensearchplugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("allowed"))
+            .putList(IndicesAliasesPlugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("allowed"))
             .build();
         createIndex("index", settings);
         final String origin = randomFrom("", "not-allowed");

@@ -114,11 +114,11 @@ public class ValidateIndicesAliasesRequestIT extends OpenSearchSingleNodeTestCas

     public void testSomeAllowed() {
         final Settings fooIndexSettings = Settings.builder()
-            .putList(IndicesAliasopensearchplugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("foo_allowed"))
+            .putList(IndicesAliasesPlugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("foo_allowed"))
             .build();
         createIndex("foo", fooIndexSettings);
         final Settings barIndexSettings = Settings.builder()
-            .putList(IndicesAliasopensearchplugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("bar_allowed"))
+            .putList(IndicesAliasesPlugin.ALLOWED_ORIGINS_SETTING.getKey(), Collections.singletonList("bar_allowed"))
             .build();
         createIndex("bar", barIndexSettings);
         final String origin = randomFrom("foo_allowed", "bar_allowed");
@@ -38,7 +38,8 @@ import org.opensearch.test.OpenSearchIntegTestCase;
 import static org.hamcrest.Matchers.containsString;

 @LuceneTestCase.SuppressCodecs("*")
-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, minNumDataNodes = 0, maxNumDataNodes = 0)
+@OpenSearchIntegTestCase.ClusterScope(
+    scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, minNumDataNodes = 0, maxNumDataNodes = 0)
 public class RecoveryWithUnsupportedIndicesIT extends OpenSearchIntegTestCase {

     /**
@@ -69,7 +69,8 @@ import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;

-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0)
+@OpenSearchIntegTestCase.ClusterScope(
+    scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0, transportClientRatio = 0)
 public class RareClusterStateIT extends OpenSearchIntegTestCase {

     @Override
@@ -53,7 +53,8 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.instanceOf;

-@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 2, numClientNodes = 1, transportClientRatio = 0.0D)
+@OpenSearchIntegTestCase.ClusterScope(
+    scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 2, numClientNodes = 1, transportClientRatio = 0.0D)
 public class IndexingPressureIT extends OpenSearchIntegTestCase {

     public static final String INDEX_NAME = "test";
@@ -64,7 +64,7 @@ public class SimpleGetFieldMappingsIT extends OpenSearchIntegTestCase {
         assertThat(response.mappings().size(), equalTo(1));
         assertThat(response.mappings().get("index").size(), equalTo(0));

-        assertThat(response.fieldMappings("index", "type", "field"), Matchers.nullValue());
+        assertThat(response.fieldMappings("index", "type", "field"), nullValue());
     }

     private XContentBuilder getMappingForType(String type) throws IOException {
@@ -153,7 +153,8 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
     * If the provided key is already present, the corresponding metadata will be replaced
     */
    public void addMetadata(String key, List<String> values) {
-        //we need to enforce this otherwise bw comp doesn't work properly, as "opensearch." was the previous criteria to split headers in two sets
+        // we need to enforce this otherwise bw comp doesn't work properly, as "opensearch."
+        // was the previous criteria to split headers in two sets
        if (key.startsWith("opensearch.") == false) {
            throw new IllegalArgumentException("exception metadata must start with [opensearch.], found [" + key + "] instead");
        }

@@ -184,7 +185,8 @@ public class OpenSearchException extends RuntimeException implements ToXContentF
     * This method will replace existing header if a header with the same key already exists
     */
    public void addHeader(String key, List<String> value) {
-        //we need to enforce this otherwise bw comp doesn't work properly, as "opensearch." was the previous criteria to split headers in two sets
+        // we need to enforce this otherwise bw comp doesn't work properly, as "opensearch."
+        // was the previous criteria to split headers in two sets
        if (key.startsWith("opensearch.")) {
            throw new IllegalArgumentException("exception headers must not start with [opensearch.], found [" + key + "] instead");
        }
@@ -21,6 +21,7 @@ package org.opensearch.action.admin.indices.mapping.get;

 import org.opensearch.OpenSearchException;
 import org.opensearch.Version;
+import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata;
 import org.opensearch.action.support.ActionFilters;
 import org.opensearch.action.support.single.shard.TransportSingleShardAction;
 import org.opensearch.cluster.ClusterState;

@@ -111,10 +112,10 @@ public class TransportGetFieldMappingsIndexAction
             }
         }

-        Map<String, Map<String, GetFieldMappingsResponse.FieldMappingMetadata>> typeMappings = new HashMap<>();
+        Map<String, Map<String, FieldMappingMetadata>> typeMappings = new HashMap<>();
         for (String type : typeIntersection) {
             DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
-            Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request);
+            Map<String, FieldMappingMetadata> fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request);
             if (!fieldMapping.isEmpty()) {
                 typeMappings.put(type, fieldMapping);
             }

@@ -169,10 +170,10 @@ public class TransportGetFieldMappingsIndexAction
         }
     };

-    private static Map<String, GetFieldMappingsResponse.FieldMappingMetadata> findFieldMappingsByType(Predicate<String> fieldPredicate,
-                                                                                                      DocumentMapper documentMapper,
-                                                                                                      GetFieldMappingsIndexRequest request) {
-        Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMappings = new HashMap<>();
+    private static Map<String, FieldMappingMetadata> findFieldMappingsByType(Predicate<String> fieldPredicate,
+                                                                             DocumentMapper documentMapper,
+                                                                             GetFieldMappingsIndexRequest request) {
+        Map<String, FieldMappingMetadata> fieldMappings = new HashMap<>();
         final MappingLookup allFieldMappers = documentMapper.mappers();
         for (String field : request.fields()) {
             if (Regex.isMatchAllPattern(field)) {

@@ -192,7 +193,7 @@ public class TransportGetFieldMappingsIndexAction
                 if (fieldMapper != null) {
                     addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults());
                 } else if (request.probablySingleFieldRequest()) {
-                    fieldMappings.put(field, GetFieldMappingsResponse.FieldMappingMetadata.NULL);
+                    fieldMappings.put(field, FieldMappingMetadata.NULL);
                 }
             }
         }

@@ -200,7 +201,7 @@ public class TransportGetFieldMappingsIndexAction
     }

     private static void addFieldMapper(Predicate<String> fieldPredicate,
-                                       String field, Mapper fieldMapper, Map<String, GetFieldMappingsResponse.FieldMappingMetadata> fieldMappings,
+                                       String field, Mapper fieldMapper, Map<String, FieldMappingMetadata> fieldMappings,
                                        boolean includeDefaults) {
         if (fieldMappings.containsKey(field)) {
             return;

@@ -209,7 +210,7 @@ public class TransportGetFieldMappingsIndexAction
         try {
             BytesReference bytes = XContentHelper.toXContent(fieldMapper, XContentType.JSON,
                 includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS, false);
-            fieldMappings.put(field, new GetFieldMappingsResponse.FieldMappingMetadata(fieldMapper.name(), bytes));
+            fieldMappings.put(field, new FieldMappingMetadata(fieldMapper.name(), bytes));
         } catch (IOException e) {
             throw new OpenSearchException("failed to serialize XContent of field [" + field + "]", e);
         }
@@ -89,7 +89,8 @@ final class SearchResponseMerger {

     /**
     * Add a search response to the list of responses to be merged together into one.
-     * Merges currently happen at once when all responses are available and {@link #getMergedResponse(SearchResponse.Clusters)} )} is called.
+     * Merges currently happen at once when all responses are available and
+     * {@link #getMergedResponse(SearchResponse.Clusters)} )} is called.
     * That may change in the future as it's possible to introduce incremental merges as responses come in if necessary.
     */
    void add(SearchResponse searchResponse) {
@@ -135,7 +135,8 @@ abstract class SearchScrollAsyncAction<T extends SearchPhaseResult> implements R
                     ex, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup));
                 continue;
             }
-            final InternalScrollSearchRequest internalRequest = TransportSearchHelper.internalScrollSearchRequest(target.getSearchContextId(), request);
+            final InternalScrollSearchRequest internalRequest =
+                TransportSearchHelper.internalScrollSearchRequest(target.getSearchContextId(), request);
             // we can't create a SearchShardTarget here since we don't know the index and shard ID we are talking to
             // we only know the node and the search context ID. Yet, the response will contain the SearchShardTarget
             // from the target node instead...that's why we pass null here
@@ -57,8 +57,8 @@ import static org.opensearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_

 /**
  * We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface or if the system property {@code
- * opensearch.enforce.bootstrap.checks} is set to {@true}. In this case we assume the node is running in production and all bootstrap checks must
- * pass.
+ * opensearch.enforce.bootstrap.checks} is set to {@true}. In this case we assume the node is running in production and
+ * all bootstrap checks must pass.
  */
 final class BootstrapChecks {

@@ -69,8 +69,8 @@ final class BootstrapChecks {

     /**
     * Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. If the system property
-     * {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether or not
-     * the transport protocol is bound to a non-loopback interface.
+     * {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless
+     * of whether or not the transport protocol is bound to a non-loopback interface.
     *
     * @param context the current node bootstrap context
     * @param boundTransportAddress the node network bindings

@@ -87,8 +87,8 @@ final class BootstrapChecks {

     /**
     * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
-     * property {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether
-     * or not the transport protocol is bound to a non-loopback interface.
+     * property {@code opensearch.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced
+     * regardless of whether or not the transport protocol is bound to a non-loopback interface.
     *
     * @param context the current node boostrap context
     * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned

@@ -103,8 +103,8 @@ final class BootstrapChecks {

     /**
     * Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
-     * property {@code opensearch.enforce.bootstrap.checks }is set to {@code true} then the bootstrap checks will be enforced regardless of whether
-     * or not the transport protocol is bound to a non-loopback interface.
+     * property {@code opensearch.enforce.bootstrap.checks }is set to {@code true} then the bootstrap checks will be enforced
+     * regardless of whether or not the transport protocol is bound to a non-loopback interface.
     *
     * @param context the current node boostrap context
     * @param enforceLimits {@code true} if the checks should be enforced or otherwise warned
@@ -118,7 +118,8 @@ final class Security {

         // enable security policy: union of template and environment-based paths, and possibly plugin permissions
         Map<String, URL> codebases = getCodebaseJarMap(JarHell.parseClassPath());
-        Policy.setPolicy(new OpenSearchPolicy(codebases, createPermissions(environment), getPluginPermissions(environment), filterBadDefaults,
+        Policy.setPolicy(new OpenSearchPolicy(codebases, createPermissions(environment),
+            getPluginPermissions(environment), filterBadDefaults,
             createRecursiveDataPathPermission(environment)));

         // enable security manager
@@ -445,7 +445,8 @@ public class InternalClusterInfoService implements ClusterInfoService, ClusterSt

             @Override
             public void onRejection(Exception e) {
-                final boolean shutDown = e instanceof OpenSearchRejectedExecutionException && ((OpenSearchRejectedExecutionException) e).isExecutorShutdown();
+                final boolean shutDown = e instanceof OpenSearchRejectedExecutionException &&
+                    ((OpenSearchRejectedExecutionException) e).isExecutorShutdown();
                 logger.log(shutDown ? Level.DEBUG : Level.WARN, "refreshing cluster info rejected [{}]", reason, e);
             }
         }
@@ -1073,7 +1073,8 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
     }

     @Override
-    public void publish(ClusterChangedEvent clusterChangedEvent, ActionListener<Void> publishListener, ClusterStatePublisher.AckListener ackListener) {
+    public void publish(ClusterChangedEvent clusterChangedEvent,
+                        ActionListener<Void> publishListener, ClusterStatePublisher.AckListener ackListener) {
         try {
             synchronized (mutex) {
                 if (mode != Mode.LEADER || getCurrentTerm() != clusterChangedEvent.state().term()) {

@@ -1302,7 +1303,8 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
         private boolean receivedJoinsProcessed;

         CoordinatorPublication(PublishRequest publishRequest, PublicationTransportHandler.PublicationContext publicationContext,
-                               ListenableFuture<Void> localNodeAckEvent, ClusterStatePublisher.AckListener ackListener, ActionListener<Void> publishListener) {
+                               ListenableFuture<Void> localNodeAckEvent, ClusterStatePublisher.AckListener ackListener,
+                               ActionListener<Void> publishListener) {
             super(publishRequest,
                 new ClusterStatePublisher.AckListener() {
                     @Override
@@ -35,7 +35,8 @@ import java.util.Set;
 public abstract class SecureSetting<T> extends Setting<T> {

     /** Determines whether legacy settings with sensitive values should be allowed. */
-    private static final boolean ALLOW_INSECURE_SETTINGS = Booleans.parseBoolean(System.getProperty("opensearch.allow_insecure_settings", "false"));
+    private static final boolean ALLOW_INSECURE_SETTINGS =
+        Booleans.parseBoolean(System.getProperty("opensearch.allow_insecure_settings", "false"));

     private static final Set<Property> ALLOWED_PROPERTIES = EnumSet.of(Property.Deprecated, Property.Consistent);

@@ -97,7 +97,8 @@ public class OpenSearchExecutors {
     }

     public static PrioritizedOpenSearchThreadPoolExecutor newSinglePrioritizing(String name, ThreadFactory threadFactory,
-                                                                                ThreadContext contextHolder, ScheduledExecutorService timer) {
+                                                                                ThreadContext contextHolder,
+                                                                                ScheduledExecutorService timer) {
         return new PrioritizedOpenSearchThreadPoolExecutor(name, 1, 1, 0L, TimeUnit.MILLISECONDS, threadFactory, contextHolder, timer);
     }

@@ -105,7 +106,8 @@ public class OpenSearchExecutors {
                                                                 ThreadFactory threadFactory, ThreadContext contextHolder) {
         ExecutorScalingQueue<Runnable> queue = new ExecutorScalingQueue<>();
         OpenSearchThreadPoolExecutor executor =
-            new OpenSearchThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory, new ForceQueuePolicy(), contextHolder);
+            new OpenSearchThreadPoolExecutor(name, min, max, keepAliveTime, unit, queue, threadFactory,
+                new ForceQueuePolicy(), contextHolder);
         queue.executor = executor;
         return executor;
     }
@@ -47,7 +47,8 @@ public class OpenSearchThreadPoolExecutor extends ThreadPoolExecutor {

     OpenSearchThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit,
                                  BlockingQueue<Runnable> workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) {
-        this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, new OpenSearchAbortPolicy(), contextHolder);
+        this(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory,
+            new OpenSearchAbortPolicy(), contextHolder);
     }

     @SuppressForbidden(reason = "properly rethrowing errors, see OpenSearchExecutors.rethrowErrors")
@@ -49,7 +49,8 @@ public class PrioritizedOpenSearchThreadPoolExecutor extends OpenSearchThreadPoo
     private final ScheduledExecutorService timer;

     public PrioritizedOpenSearchThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit,
-                                                   ThreadFactory threadFactory, ThreadContext contextHolder, ScheduledExecutorService timer) {
+                                                   ThreadFactory threadFactory, ThreadContext contextHolder,
+                                                   ScheduledExecutorService timer) {
         super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit, new PriorityBlockingQueue<>(), threadFactory, contextHolder);
         this.timer = timer;
     }
@ -61,8 +61,8 @@ public final class QueueResizingOpenSearchThreadPoolExecutor extends OpenSearchT
QueueResizingOpenSearchThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit,
ResizableBlockingQueue<Runnable> workQueue, int minQueueSize, int maxQueueSize,
Function<Runnable, WrappedRunnable> runnableWrapper, final int tasksPerFrame,
TimeValue targetedResponseTime, ThreadFactory threadFactory, XRejectedExecutionHandler handler,
ThreadContext contextHolder) {
TimeValue targetedResponseTime, ThreadFactory threadFactory,
XRejectedExecutionHandler handler, ThreadContext contextHolder) {
super(name, corePoolSize, maximumPoolSize, keepAliveTime, unit,
workQueue, threadFactory, handler, contextHolder);
this.runnableWrapper = runnableWrapper;
@ -125,7 +125,8 @@ public class TransportNodesListGatewayStartedShards extends
if (request.getCustomDataPath() != null) {
customDataPath = request.getCustomDataPath();
} else {
// TODO: Fallback for BWC with older OpenSearch versions. Remove once request.getCustomDataPath() always returns non-null
// TODO: Fallback for BWC with older OpenSearch versions.
// Remove once request.getCustomDataPath() always returns non-null
final IndexMetadata metadata = clusterService.state().metadata().index(shardId.getIndex());
if (metadata != null) {
customDataPath = new IndexSettings(metadata, settings).customDataPath();
@ -170,8 +170,9 @@ public class ReadOnlyEngine extends Engine {
// reopened as an internal engine, which would be the path to fix the issue.
}

protected final OpenSearchDirectoryReader wrapReader(DirectoryReader reader,
Function<DirectoryReader, DirectoryReader> readerWrapperFunction) throws IOException {
protected final OpenSearchDirectoryReader wrapReader(
DirectoryReader reader,
Function<DirectoryReader, DirectoryReader> readerWrapperFunction) throws IOException {
if (engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD);
}
@ -180,7 +180,7 @@ public final class AnalysisModule {
preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new));
// Add "standard" for old indices (bwc)
preConfiguredTokenFilters.register( "standard",
PreConfiguredTokenFilter.opensearchVersion("standard", true, (reader, version) -> {
PreConfiguredTokenFilter.openSearchVersion("standard", true, (reader, version) -> {
// This was originally removed in 7_0_0 but due to a cacheing bug it was still possible
// in certain circumstances to create a new index referencing the standard token filter
// until version 7_5_2
@ -148,7 +148,8 @@ public class TransportNodesListShardStoreMetadata extends TransportNodesAction<T
if (request.getCustomDataPath() != null) {
customDataPath = request.getCustomDataPath();
} else {
// TODO: Fallback for BWC with older predecessor (ES) versions. Remove this once request.getCustomDataPath() always returns non-null
// TODO: Fallback for BWC with older predecessor (ES) versions.
// Remove this once request.getCustomDataPath() always returns non-null
if (indexService != null) {
customDataPath = indexService.getIndexSettings().customDataPath();
} else {
@ -70,8 +70,8 @@
* | | |- snap-20131011.dat - SMILE serialized {@link org.opensearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot} for
* | | | snapshot "20131011"
* | | |- index-123 - SMILE serialized {@link org.opensearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots} for
* | | | the shard (files with numeric suffixes were created by older versions, newer OpenSearch versions use a uuid
* | | | suffix instead)
* | | | the shard (files with numeric suffixes were created by older versions, newer OpenSearch
* | | | versions use a uuid suffix instead)
* | |
* | |- 1/ - data for shard "1" of index "foo"
* | | |- __1
@ -19,7 +19,6 @@

package org.opensearch.rest;

import org.opensearch.common.util.concurrent.ThreadContext;

/**
* A definition for an http header that should be copied to the {@link org.opensearch.common.util.concurrent.ThreadContext} when
@ -21,7 +21,6 @@ package org.opensearch.rest;

import org.opensearch.OpenSearchException;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.lease.Releasable;

import java.util.ArrayList;
import java.util.Collections;
@ -34,7 +34,8 @@ public class RestUtils {
/**
* Sets whether we decode a '+' in an url as a space or not.
*/
private static final boolean DECODE_PLUS_AS_SPACE = Booleans.parseBoolean(System.getProperty("opensearch.rest.url_plus_as_space", "false"));
private static final boolean DECODE_PLUS_AS_SPACE =
Booleans.parseBoolean(System.getProperty("opensearch.rest.url_plus_as_space", "false"));

public static final PathTrie.Decoder REST_DECODER = new PathTrie.Decoder() {
@Override
@ -97,9 +97,9 @@ public abstract class AbstractHistogramAggregator extends BucketsAggregator {
// the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());

EmptyBucketInfo emptyBucketInfo = null;
InternalHistogram.EmptyBucketInfo emptyBucketInfo = null;
if (minDocCount == 0) {
emptyBucketInfo = new EmptyBucketInfo(interval, offset, getEffectiveMin(extendedBounds),
emptyBucketInfo = new InternalHistogram.EmptyBucketInfo(interval, offset, getEffectiveMin(extendedBounds),
getEffectiveMax(extendedBounds), buildEmptySubAggregations());
}
return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
@ -160,7 +160,8 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder<Aut
int maxQueueSize = settings.maxQueueSize;
int frameSize = settings.frameSize;
TimeValue targetedResponseTime = settings.targetedResponseTime;
final ThreadFactory threadFactory = OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ThreadFactory threadFactory =
OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ExecutorService executor =
OpenSearchExecutors.newAutoQueueFixed(
settings.nodeName + "/" + name(),

@ -137,7 +137,8 @@ public final class FixedExecutorBuilder extends ExecutorBuilder<FixedExecutorBui
ThreadPool.ExecutorHolder build(final FixedExecutorSettings settings, final ThreadContext threadContext) {
int size = settings.size;
int queueSize = settings.queueSize;
final ThreadFactory threadFactory = OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ThreadFactory threadFactory =
OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ExecutorService executor =
OpenSearchExecutors.newFixed(settings.nodeName + "/" + name(), size, queueSize, threadFactory, threadContext);
final ThreadPool.Info info =

@ -95,7 +95,8 @@ public final class ScalingExecutorBuilder extends ExecutorBuilder<ScalingExecuto
int core = settings.core;
int max = settings.max;
final ThreadPool.Info info = new ThreadPool.Info(name(), ThreadPool.ThreadPoolType.SCALING, core, max, keepAlive, null);
final ThreadFactory threadFactory = OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ThreadFactory threadFactory =
OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName(settings.nodeName, name()));
final ExecutorService executor =
OpenSearchExecutors.newScaling(
settings.nodeName + "/" + name(),
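As a point of reference for the factory methods reformatted above, a hedged sketch of standing up and tearing down a fixed pool the way these builders do; the node name, pool name, and sizes are made up, and constructing a bare ThreadContext from empty Settings is an assumption rather than something this diff shows:

    // Assumed imports: org.opensearch.common.settings.Settings,
    // org.opensearch.common.util.concurrent.OpenSearchExecutors,
    // org.opensearch.common.util.concurrent.ThreadContext,
    // java.util.concurrent.ExecutorService, java.util.concurrent.ThreadFactory
    ThreadContext threadContext = new ThreadContext(Settings.EMPTY); // assumed constructor
    ThreadFactory threadFactory =
        OpenSearchExecutors.daemonThreadFactory(OpenSearchExecutors.threadName("node-0", "my_pool"));
    // Fixed pool: 4 workers, bounded queue of 100, daemon threads named after the (made-up) node/pool.
    ExecutorService executor = OpenSearchExecutors.newFixed("node-0/my_pool", 4, 100, threadFactory, threadContext);
    try {
        executor.execute(() -> System.out.println("runs on a daemon worker of the fixed pool"));
    } finally {
        executor.shutdown();
    }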
@ -709,7 +709,7 @@ public class ExceptionSerializationTests extends OpenSearchTestCase {
ids.put(32, org.opensearch.indices.InvalidIndexNameException.class);
ids.put(33, org.opensearch.indices.IndexPrimaryShardNotAllocatedException.class);
ids.put(34, org.opensearch.transport.TransportException.class);
ids.put(35, org.opensearch.OpenearchParseException.class);
ids.put(35, org.opensearch.OpenSearchParseException.class);
ids.put(36, org.opensearch.search.SearchException.class);
ids.put(37, org.opensearch.index.mapper.MapperException.class);
ids.put(38, org.opensearch.indices.InvalidTypeNameException.class);
@ -24,15 +24,13 @@ import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.action.admin.cluster.state.ClusterStateRequest;
import org.opensearch.action.admin.cluster.state.ClusterStateResponse;

import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

public class ClusterStateApiTests extends ESSingleNodeTestCase {
public class ClusterStateApiTests extends OpenSearchSingleNodeTestCase {

public void testWaitForMetadataVersion() throws Exception {
ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
@ -40,16 +40,13 @@ import org.opensearch.threadpool.ThreadPool;
import org.opensearch.transport.TransportService;
import org.junit.After;
import org.junit.Before;
import org.opensearch.action.admin.indices.get.GetIndexRequest;
import org.opensearch.action.admin.indices.get.GetIndexResponse;
import org.opensearch.action.admin.indices.get.TransportGetIndexAction;

import java.util.concurrent.TimeUnit;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;

public class GetIndexActionTests extends ESSingleNodeTestCase {
public class GetIndexActionTests extends OpenSearchSingleNodeTestCase {

private TransportService transportService;
private ClusterService clusterService;
@ -28,15 +28,13 @@ import org.opensearch.plugins.Plugin;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.InternalSettingsPlugin;
import org.junit.Before;
import org.opensearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.opensearch.action.admin.indices.segments.IndicesSegmentsRequest;

import java.util.Collection;
import java.util.List;

import static org.hamcrest.Matchers.is;

public class IndicesSegmentsRequestTests extends ESSingleNodeTestCase {
public class IndicesSegmentsRequestTests extends OpenSearchSingleNodeTestCase {

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
@ -121,7 +121,7 @@ public class TransportResizeActionTests extends OpenSearchTestCase {
RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// now we start the shard
routingTable = ESAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
routingTable = OpenSearchAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

TransportResizeAction.prepareCreateIndexRequest(new ResizeRequest("target", "source"), clusterState,

@ -140,7 +140,7 @@ public class TransportResizeActionTests extends OpenSearchTestCase {
RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// now we start the shard
routingTable = ESAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
routingTable = OpenSearchAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

ResizeRequest resizeRequest = new ResizeRequest("target", "source");

@ -170,7 +170,7 @@ public class TransportResizeActionTests extends OpenSearchTestCase {
RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// now we start the shard
routingTable = ESAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
routingTable = OpenSearchAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();

ResizeRequest resizeRequest = new ResizeRequest("target", "source");

@ -205,7 +205,7 @@ public class TransportResizeActionTests extends OpenSearchTestCase {
RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
// now we start the shard
routingTable = ESAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, indexName).routingTable();
routingTable = OpenSearchAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, indexName).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
int numSourceShards = clusterState.metadata().index(indexName).getNumberOfShards();
DocsStats stats = new DocsStats(between(0, (IndexWriter.MAX_DOCS) / numSourceShards), between(1, 1000), between(1, 10000));
@ -32,9 +32,6 @@ import org.opensearch.index.engine.CommitStats;
import org.opensearch.index.engine.SegmentsStats;
import org.opensearch.index.translog.Translog;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.action.admin.indices.stats.CommonStats;
import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
import org.opensearch.action.admin.indices.stats.ShardStats;

import java.util.List;
import java.util.concurrent.TimeUnit;

@ -44,7 +41,7 @@ import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.notNullValue;

public class IndicesStatsTests extends ESSingleNodeTestCase {
public class IndicesStatsTests extends OpenSearchSingleNodeTestCase {

public void testSegmentStatsEmptyIndex() {
createIndex("test");
@ -21,15 +21,13 @@ package org.opensearch.action.admin.indices.validate.query;

import org.opensearch.action.ActionListener;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.opensearch.action.admin.indices.validate.query.ValidateQueryResponse;


import java.util.concurrent.atomic.AtomicBoolean;

import static org.hamcrest.Matchers.equalTo;

public class TransportValidateQueryActionTests extends ESSingleNodeTestCase {
public class TransportValidateQueryActionTests extends OpenSearchSingleNodeTestCase {

/*
* This test covers a fallthrough bug that we had, where if the index we were validating against did not exist, we would invoke the
@ -72,7 +72,8 @@ public class QueryPhaseResultConsumerTests extends OpenSearchTestCase {
};
});
threadPool = new TestThreadPool(SearchPhaseControllerTests.class.getName());
executor = OpenSearchExecutors.newFixed("test", 1, 10, OpenSearchExecutors.daemonThreadFactory("test"), threadPool.getThreadContext());
executor = OpenSearchExecutors.newFixed(
"test", 1, 10, OpenSearchExecutors.daemonThreadFactory("test"), threadPool.getThreadContext());
}

@After
@ -470,8 +470,8 @@ public class TransportReplicationActionTests extends OpenSearchTestCase {
// finish relocation
ShardRouting relocationTarget = clusterService.state().getRoutingTable().shardRoutingTable(shardId)
.shardsWithState(ShardRoutingState.INITIALIZING).get(0);
AllocationService allocationService = ESAllocationTestCase.createAllocationService();
ClusterState updatedState = ESAllocationTestCase.startShardsAndReroute(allocationService, state, relocationTarget);
AllocationService allocationService = OpenSearchAllocationTestCase.createAllocationService();
ClusterState updatedState = OpenSearchAllocationTestCase.startShardsAndReroute(allocationService, state, relocationTarget);

setState(clusterService, updatedState);
logger.debug("--> relocation complete state:\n{}", clusterService.state());
@ -56,7 +56,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

public class GetTermVectorsTests extends ESSingleNodeTestCase {
public class GetTermVectorsTests extends OpenSearchSingleNodeTestCase {

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
@ -32,7 +32,7 @@ import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.allOf;

public class OpenSearchCliTests extends BaseOpenSearchCliTestCase {
public class OpenSearchCliTests extends OpenSearchCliTestCase {

public void testVersion() throws Exception {
runTestThatVersionIsMutuallyExclusiveToOtherOptions("-V", "-d");
@ -88,7 +88,8 @@ public class MessagesTests extends OpenSearchTestCase {
switch (randomInt(1)) {
case 0:
// change term
return new PublishResponse(randomValueOtherThan(publishResponse.getTerm(), OpenSearchTestCase::randomNonNegativeLong),
return new PublishResponse(
randomValueOtherThan(publishResponse.getTerm(), OpenSearchTestCase::randomNonNegativeLong),
publishResponse.getVersion());
case 1:
// change version

@ -164,7 +165,8 @@ public class MessagesTests extends OpenSearchTestCase {
case 1:
// change term
return new ApplyCommitRequest(applyCommit.getSourceNode(),
randomValueOtherThan(applyCommit.getTerm(), OpenSearchTestCase::randomNonNegativeLong), applyCommit.getVersion());
randomValueOtherThan(applyCommit.getTerm(), OpenSearchTestCase::randomNonNegativeLong),
applyCommit.getVersion());
case 2:
// change version
return new ApplyCommitRequest(applyCommit.getSourceNode(), applyCommit.getTerm(),
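The randomValueOtherThan calls reformatted above are the idiom these serialization tests use to mutate exactly one field: draw fresh random values until one differs from the original. A small sketch of the idiom as it might look inside a test method of an OpenSearchTestCase subclass (the local variable names are illustrative, not from this change):

    // Guaranteed to produce a term different from the original, so the mutated
    // message copy cannot accidentally compare equal to the one it was derived from.
    long term = randomNonNegativeLong();
    long differentTerm = randomValueOtherThan(term, OpenSearchTestCase::randomNonNegativeLong);
    assertNotEquals(term, differentTerm);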
@ -156,7 +156,8 @@ public class ComponentTemplateTests extends AbstractDiffableSerializationTestCas
throw new IllegalStateException("illegal randomization branch");
}
case 1:
return new ComponentTemplate(orig.template(), randomValueOtherThan(orig.version(), OpenSearchTestCase::randomNonNegativeLong),
return new ComponentTemplate(orig.template(),
randomValueOtherThan(orig.version(), OpenSearchTestCase::randomNonNegativeLong),
orig.metadata());
case 2:
return new ComponentTemplate(orig.template(), orig.version(),
@ -54,7 +54,7 @@ import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.index.Index;routing/allocation/AllocationPriorityTests.javarouting/allocation/AllocationPriorityTests.java
import org.opensearch.index.Index;
import org.opensearch.index.IndexNotFoundException;
import org.opensearch.index.shard.ShardId;
import org.opensearch.index.shard.ShardNotFoundException;
@ -468,7 +468,8 @@ public class NodeVersionAllocationDeciderTests extends OpenSearchAllocationTestC
RoutingNode newNode = new RoutingNode("newNode", newNode("newNode", Version.CURRENT));
RoutingNode oldNode = new RoutingNode("oldNode", newNode("oldNode", VersionUtils.getPreviousVersion()));

final org.opensearch.cluster.ClusterName clusterName = org.opensearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY);
final org.opensearch.cluster.ClusterName clusterName =
org.opensearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY);
ClusterState clusterState = ClusterState.builder(clusterName).metadata(metadata).routingTable(initialRoutingTable)
.nodes(DiscoveryNodes.builder().add(newNode.node()).add(oldNode.node())).build();
@ -51,7 +51,7 @@ public class TrackFailedAllocationNodesTests extends OpenSearchAllocationTestCas
for (int i = 0; i < 5; i++) {
discoNodes.add(newNode("node-" + i));
}
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
.nodes(discoNodes)
.metadata(metadata).routingTable(RoutingTable.builder().addAsNew(metadata.index("idx")).build())
.build();