Merge remote-tracking branch 'es/7.x' into enrich-7.x
commit 2677ac14d2

LICENSE.txt
@@ -1,13 +1,10 @@
-Source code in this repository is variously licensed under the Apache License
-Version 2.0, an Apache compatible license, or the Elastic License. Outside of
-the "x-pack" folder, source code in a given file is licensed under the Apache
-License Version 2.0, unless otherwise noted at the beginning of the file or a
-LICENSE file present in the directory subtree declares a separate license.
-Within the "x-pack" folder, source code in a given file is licensed under the
-Elastic License, unless otherwise noted at the beginning of the file or a
-LICENSE file present in the directory subtree declares a separate license.
+Source code in this repository is covered by one of three licenses: (i) the
+Apache License 2.0 (ii) an Apache License 2.0 compatible license (iii) the
+Elastic License. The default license throughout the repository is Apache License
+2.0 unless the header specifies another license. Elastic Licensed code is found
+only in the x-pack directory.
 
 The build produces two sets of binaries - one set that falls under the Elastic
-License and another set that falls under Apache License Version 2.0. The
-binaries that contain `-oss` in the artifact name are licensed under the Apache
-License Version 2.0.
+License and another set that falls under Apache License 2.0. The binaries that
+contain `-oss` in the artifact name are licensed under Apache License 2.0 and
+these binaries do not package any code from the x-pack directory.
@@ -565,10 +565,10 @@ When running `./gradlew check`, minimal bwc checks are also run against compatib
 Sometimes a backward compatibility change spans two versions. A common case is new functionality
 that needs a BWC bridge in an unreleased version of a release branch (for example, 5.x).
 To test the changes, you can instruct Gradle to build the BWC version from another remote/branch combination instead of
-pulling the release branch from GitHub. You do so using the `tests.bwc.remote` and `tests.bwc.refspec.BRANCH` system properties:
+pulling the release branch from GitHub. You do so using the `bwc.remote` and `bwc.refspec.BRANCH` system properties:
 
 -------------------------------------------------
-./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x
+./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x
 -------------------------------------------------
 
 The branch needs to be available on the remote that the BWC makes of the
@@ -583,7 +583,7 @@ will need to:
 will contain your change.
 . Create a branch called `index_req_bwc_5.x` off `5.x`. This will contain your bwc layer.
 . Push both branches to your remote repository.
-. Run the tests with `./gradlew check -Dtests.bwc.remote=${remote} -Dtests.bwc.refspec.5.x=index_req_bwc_5.x`.
+. Run the tests with `./gradlew check -Dbwc.remote=${remote} -Dbwc.refspec.5.x=index_req_bwc_5.x`.
 
 ==== Skip fetching latest
@@ -894,6 +894,9 @@ class BuildPlugin implements Plugin<Project> {
             // TODO: remove this once ctx isn't added to update script params in 7.0
             test.systemProperty 'es.scripting.update.ctx_in_params', 'false'
 
+            // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
+            test.systemProperty 'es.transport.cname_in_publish_address', 'true'
+
             test.testLogging { TestLoggingContainer logging ->
                 logging.showExceptions = true
                 logging.showCauses = true
@@ -21,9 +21,9 @@ package org.elasticsearch.gradle.test
 import org.apache.tools.ant.DefaultLogger
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.BuildPlugin
+import org.elasticsearch.gradle.BwcVersions
 import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.BwcVersions
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.plugin.PluginBuildPlugin
 import org.elasticsearch.gradle.plugin.PluginPropertiesExtension
@@ -39,11 +39,13 @@ import org.gradle.api.logging.Logger
 import org.gradle.api.tasks.Copy
 import org.gradle.api.tasks.Delete
 import org.gradle.api.tasks.Exec
+import org.gradle.internal.jvm.Jvm
+
 import java.nio.charset.StandardCharsets
 import java.nio.file.Paths
 import java.util.concurrent.TimeUnit
 import java.util.stream.Collectors
 
 /**
  * A helper for creating tasks to build a cluster that is used by a task, and tear down the cluster when the task is finished.
  */
|
@ -917,15 +919,7 @@ class ClusterFormationTasks {
|
|||
onlyIf { node.pidFile.exists() }
|
||||
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
|
||||
ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"
|
||||
File jps
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
jps = getJpsExecutableByName(project, "jps.exe")
|
||||
} else {
|
||||
jps = getJpsExecutableByName(project, "jps")
|
||||
}
|
||||
if (!jps.exists()) {
|
||||
throw new GradleException("jps executable not found; ensure that you're running Gradle with the JDK rather than the JRE")
|
||||
}
|
||||
final File jps = Jvm.forHome(project.runtimeJavaHome).getExecutable('jps')
|
||||
commandLine jps, '-l'
|
||||
standardOutput = new ByteArrayOutputStream()
|
||||
doLast {
|
||||
|
@@ -944,10 +938,6 @@ class ClusterFormationTasks {
         }
     }
 
-    private static File getJpsExecutableByName(Project project, String jpsExecutableName) {
-        return Paths.get(project.runtimeJavaHome.toString(), "bin/" + jpsExecutableName).toFile()
-    }
-
     /** Adds a task to kill an elasticsearch node with the given pidfile */
     static Task configureStopTask(String name, Project project, Object depends, NodeInfo node) {
        return project.tasks.create(name: name, type: LoggedExec, dependsOn: depends) {
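The replacement above leans on Gradle's bundled `Jvm` helper instead of hand-building a path to the `jps` binary. A standalone sketch of that lookup in Java (the `java.home` fallback is an assumption for illustration; the build itself passes `project.runtimeJavaHome`):

    import org.gradle.internal.jvm.Jvm;
    import java.io.File;

    class JpsLookupExample {
        public static void main(String[] args) {
            // Jvm.forHome resolves bin/<tool> and handles the .exe suffix on
            // Windows, which is what the removed getJpsExecutableByName() did by hand.
            File javaHome = new File(System.getProperty("java.home"));
            File jps = Jvm.forHome(javaHome).getExecutable("jps");
            System.out.println(jps.getAbsolutePath());
        }
    }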
@@ -36,6 +36,7 @@ import org.elasticsearch.gradle.vagrant.VagrantExtension;
 import org.gradle.api.NamedDomainObjectContainer;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
+import org.gradle.api.Task;
 import org.gradle.api.artifacts.Configuration;
 import org.gradle.api.file.Directory;
 import org.gradle.api.plugins.ExtraPropertiesExtension;
@@ -74,6 +75,7 @@ public class DistroTestPlugin implements Plugin<Project> {
     private static final String COPY_UPGRADE_TASK = "copyUpgradePackages";
     private static final String COPY_PLUGINS_TASK = "copyPlugins";
     private static final String IN_VM_SYSPROP = "tests.inVM";
+    private static final String DISTRIBUTION_SYSPROP = "tests.distribution";
 
     @Override
     public void apply(Project project) {
@@ -88,14 +90,15 @@ public class DistroTestPlugin implements Plugin<Project> {
         Provider<Directory> upgradeDir = project.getLayout().getBuildDirectory().dir("packaging/upgrade");
         Provider<Directory> pluginsDir = project.getLayout().getBuildDirectory().dir("packaging/plugins");
 
-        configureDistributions(project, upgradeVersion);
+        List<ElasticsearchDistribution> distributions = configureDistributions(project, upgradeVersion);
         TaskProvider<Copy> copyDistributionsTask = configureCopyDistributionsTask(project, distributionsDir);
         TaskProvider<Copy> copyUpgradeTask = configureCopyUpgradeTask(project, upgradeVersion, upgradeDir);
         TaskProvider<Copy> copyPluginsTask = configureCopyPluginsTask(project, pluginsDir);
 
-        Map<String, TaskProvider<?>> distroTests = new HashMap<>();
         Map<String, TaskProvider<?>> batsTests = new HashMap<>();
-        distroTests.put("distribution", configureDistroTest(project, distributionsDir, copyDistributionsTask));
+        for (ElasticsearchDistribution distribution : distributions) {
+            configureDistroTest(project, distribution);
+        }
         batsTests.put("bats oss", configureBatsTest(project, "oss", distributionsDir, copyDistributionsTask));
         batsTests.put("bats default", configureBatsTest(project, "default", distributionsDir, copyDistributionsTask));
         configureBatsTest(project, "plugins",distributionsDir, copyDistributionsTask, copyPluginsTask).configure(t ->
@@ -108,17 +111,27 @@ public class DistroTestPlugin implements Plugin<Project> {
             vmProject.getPluginManager().apply(VagrantBasePlugin.class);
             vmProject.getPluginManager().apply(JdkDownloadPlugin.class);
             List<Object> vmDependencies = new ArrayList<>(configureVM(vmProject));
-            // a hack to ensure the parent task has already been run. this will not be necessary once tests are per distribution
-            // which will eliminate the copy distributions task altogether
-            vmDependencies.add(copyDistributionsTask);
             vmDependencies.add(project.getConfigurations().getByName("testRuntimeClasspath"));
 
-            distroTests.forEach((desc, task) -> configureVMWrapperTask(vmProject, desc, task.getName(), vmDependencies));
-            VagrantExtension vagrant = vmProject.getExtensions().getByType(VagrantExtension.class);
+            TaskProvider<Task> distroTest = vmProject.getTasks().register("distroTest");
+            for (ElasticsearchDistribution distribution : distributions) {
+                String destructiveTaskName = destructiveDistroTestTaskName(distribution);
+                Platform platform = distribution.getPlatform();
+                // this condition ensures windows boxes get windows distributions, and linux boxes get linux distributions
+                if (isWindows(vmProject) == (platform == Platform.WINDOWS)) {
+                    TaskProvider<GradleDistroTestTask> vmTask =
+                        configureVMWrapperTask(vmProject, distribution.getName() + " distribution", destructiveTaskName, vmDependencies);
+                    vmTask.configure(t -> t.dependsOn(distribution));
+                    distroTest.configure(t -> t.dependsOn(vmTask));
+                }
+            }
+
 
             batsTests.forEach((desc, task) -> {
                 configureVMWrapperTask(vmProject, desc, task.getName(), vmDependencies).configure(t -> {
                     t.setProgressHandler(new BatsProgressLogger(project.getLogger()));
-                    t.onlyIf(spec -> vagrant.isWindowsVM() == false); // bats doesn't run on windows
+                    t.onlyIf(spec -> isWindows(vmProject) == false); // bats doesn't run on windows
                     t.dependsOn(copyDistributionsTask);
                 });
             });
         });
@@ -166,7 +179,7 @@ public class DistroTestPlugin implements Plugin<Project> {
         VagrantExtension vagrant = project.getExtensions().getByType(VagrantExtension.class);
         vagrant.setBox(box);
         vagrant.vmEnv("PATH", convertPath(project, vagrant, gradleJdk, "/bin:$PATH", "\\bin;$Env:PATH"));
-        vagrant.setIsWindowsVM(box.contains("windows"));
+        vagrant.setIsWindowsVM(isWindows(project));
 
         return Arrays.asList(gradleJdk);
     }
@@ -271,15 +284,14 @@ public class DistroTestPlugin implements Plugin<Project> {
             });
     }
 
-    private static TaskProvider<?> configureDistroTest(Project project, Provider<Directory> distributionsDir,
-                                                       TaskProvider<Copy> copyPackagingArchives) {
-        // TODO: don't run with security manager...
-        return project.getTasks().register("destructiveDistroTest", Test.class,
+    private static TaskProvider<?> configureDistroTest(Project project, ElasticsearchDistribution distribution) {
+        return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class,
             t -> {
                 t.setMaxParallelForks(1);
-                t.setWorkingDir(distributionsDir);
+                t.setWorkingDir(project.getProjectDir());
+                t.systemProperty(DISTRIBUTION_SYSPROP, distribution.toString());
                 if (System.getProperty(IN_VM_SYSPROP) == null) {
-                    t.dependsOn(copyPackagingArchives);
+                    t.dependsOn(distribution);
                 }
             });
     }
@@ -299,7 +311,7 @@ public class DistroTestPlugin implements Plugin<Project> {
         });
     }
 
-    private void configureDistributions(Project project, Version upgradeVersion) {
+    private List<ElasticsearchDistribution> configureDistributions(Project project, Version upgradeVersion) {
         NamedDomainObjectContainer<ElasticsearchDistribution> distributions = DistributionDownloadPlugin.getContainer(project);
         List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
         List<ElasticsearchDistribution> upgradeDistros = new ArrayList<>();
@@ -337,13 +349,15 @@ public class DistroTestPlugin implements Plugin<Project> {
         List<Configuration> distroUpgradeConfigs = upgradeDistros.stream().map(ElasticsearchDistribution::getConfiguration)
             .collect(Collectors.toList());
         packagingUpgradeConfig.setExtendsFrom(distroUpgradeConfigs);
+
+        return currentDistros;
     }
 
     private static void addDistro(NamedDomainObjectContainer<ElasticsearchDistribution> distributions,
                                   Type type, Platform platform, Flavor flavor, boolean bundledJdk, String version,
                                   List<ElasticsearchDistribution> container) {
 
-        String name = flavor + "-" + (type == Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk") + "-" + version;
+        String name = distroId(type, platform, flavor, bundledJdk) + "-" + version;
         if (distributions.findByName(name) != null) {
             return;
         }
@@ -358,4 +372,17 @@ public class DistroTestPlugin implements Plugin<Project> {
         });
         container.add(distro);
     }
+
+    // return true if the project is for a windows VM, false otherwise
+    private static boolean isWindows(Project project) {
+        return project.getName().contains("windows");
+    }
+
+    private static String distroId(Type type, Platform platform, Flavor flavor, boolean bundledJdk) {
+        return flavor + "-" + (type == Type.ARCHIVE ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk");
+    }
+
+    private static String destructiveDistroTestTaskName(ElasticsearchDistribution distro) {
+        return "destructiveDistroTest." + distroId(distro.getType(), distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk());
+    }
 }
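A hedged illustration of the names the new helpers generate, with plain strings standing in for the Type/Platform/Flavor enums (their lowercase `toString()` values are an assumption, suggested by the existing `-oss`/`-no-jdk` artifact names):

    class DistroIdExample {
        // Mirrors the string-building logic of distroId() above.
        static String distroId(String type, String platform, String flavor, boolean bundledJdk) {
            return flavor + "-" + (type.equals("archive") ? platform + "-" : "") + type + (bundledJdk ? "" : "-no-jdk");
        }

        public static void main(String[] args) {
            System.out.println(distroId("archive", "linux", "oss", true));  // oss-linux-archive
            System.out.println(distroId("rpm", null, "default", false));    // default-rpm-no-jdk
            // destructiveDistroTestTaskName(...) prepends "destructiveDistroTest."
        }
    }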
@@ -63,6 +63,7 @@ class RestIntegTestTask extends DefaultTask {
         boolean usesTestclusters = project.plugins.hasPlugin(TestClustersPlugin.class)
         if (usesTestclusters == false) {
             clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
+            runner.outputs.doNotCacheIf("Caching is disabled when using ClusterFormationTasks", { true })
         } else {
             project.testClusters {
                 "$name" {
@@ -93,7 +93,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
             ext.set("minimumCompilerVersion", minimumCompilerVersion);
             ext.set("minimumRuntimeVersion", minimumRuntimeVersion);
             ext.set("gradleJavaVersion", Jvm.current().getJavaVersion());
-            ext.set("gitRevision", gitRevision(project));
+            ext.set("gitRevision", gitRevision(project.getRootProject().getRootDir()));
             ext.set("buildDate", ZonedDateTime.now(ZoneOffset.UTC));
         });
     }
@@ -204,7 +204,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
         return _defaultParallel;
     }
 
-    private String gitRevision(final Project project) {
+    public static String gitRevision(File rootDir) {
         try {
             /*
              * We want to avoid forking another process to run git rev-parse HEAD. Instead, we will read the refs manually. The
@@ -222,7 +222,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
              * In the case of a worktree, we read the gitdir from the plain text .git file. This resolves to a directory from which we read
              * the HEAD file and resolve commondir to the plain git repository.
              */
-            final Path dotGit = project.getRootProject().getRootDir().toPath().resolve(".git");
+            final Path dotGit = rootDir.toPath().resolve(".git");
             final String revision;
             if (Files.exists(dotGit) == false) {
                 return "unknown";
@@ -259,7 +259,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
         }
     }
 
-    private String readFirstLine(final Path path) throws IOException {
+    private static String readFirstLine(final Path path) throws IOException {
         return Files.lines(path, StandardCharsets.UTF_8)
             .findFirst()
             .orElseThrow(() -> new IOException("file [" + path + "] is empty"));
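Note on the refactor above: `gitRevision` is now a public static helper keyed off a plain directory instead of a Gradle `Project`, which is what lets the bwc build script further down call `GlobalBuildInfoPlugin.gitRevision(checkoutDir)` directly. A minimal caller-side sketch (the checkout path is an assumption for illustration):

    import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin;
    import java.io.File;

    class GitRevisionExample {
        public static void main(String[] args) {
            // Resolve the current commit hash for an arbitrary checkout directory;
            // per the hunk above, it returns "unknown" when no .git is present.
            String revision = GlobalBuildInfoPlugin.gitRevision(new File("build/bwc/checkout-5.x"));
            System.out.println(revision);
        }
    }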
@@ -48,7 +48,7 @@ public class TestClustersRegistry {
                 }
             }
         } else {
-            cluster.stop(false);
+            cluster.stop(true);
             runningClusters.remove(cluster);
         }
     } else {
@@ -39,7 +39,6 @@ import org.elasticsearch.client.ml.DeleteFilterRequest;
 import org.elasticsearch.client.ml.DeleteForecastRequest;
 import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
-import org.elasticsearch.client.ml.EstimateMemoryUsageRequest;
 import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
 import org.elasticsearch.client.ml.FindFileStructureRequest;
 import org.elasticsearch.client.ml.FlushJobRequest;
@@ -701,7 +700,7 @@ final class MLRequestConverters {
         return request;
     }
 
-    static Request estimateMemoryUsage(EstimateMemoryUsageRequest estimateRequest) throws IOException {
+    static Request estimateMemoryUsage(PutDataFrameAnalyticsRequest estimateRequest) throws IOException {
         String endpoint = new EndpointBuilder()
             .addPathPartAsIs("_ml", "data_frame", "analytics", "_estimate_memory_usage")
             .build();
@@ -34,7 +34,6 @@ import org.elasticsearch.client.ml.DeleteForecastRequest;
 import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.DeleteJobResponse;
 import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
-import org.elasticsearch.client.ml.EstimateMemoryUsageRequest;
 import org.elasticsearch.client.ml.EstimateMemoryUsageResponse;
 import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
 import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
@@ -2195,14 +2194,15 @@ public final class MachineLearningClient {
      * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/estimate-memory-usage-dfanalytics.html">
      * Estimate Memory Usage for Data Frame Analytics documentation</a>
      *
-     * @param request The {@link EstimateMemoryUsageRequest}
+     * @param request The {@link PutDataFrameAnalyticsRequest}
      * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @return {@link EstimateMemoryUsageResponse} response object
      * @throws IOException when there is a serialization issue sending the request or receiving the response
      */
-    public EstimateMemoryUsageResponse estimateMemoryUsage(EstimateMemoryUsageRequest request,
+    public EstimateMemoryUsageResponse estimateMemoryUsage(PutDataFrameAnalyticsRequest request,
                                                            RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(request,
+        return restHighLevelClient.performRequestAndParseEntity(
+            request,
             MLRequestConverters::estimateMemoryUsage,
             options,
             EstimateMemoryUsageResponse::fromXContent,
@@ -2216,13 +2216,14 @@ public final class MachineLearningClient {
      * see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/estimate-memory-usage-dfanalytics.html">
      * Estimate Memory Usage for Data Frame Analytics documentation</a>
      *
-     * @param request The {@link EstimateMemoryUsageRequest}
+     * @param request The {@link PutDataFrameAnalyticsRequest}
      * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener Listener to be notified upon request completion
      */
-    public void estimateMemoryUsageAsync(EstimateMemoryUsageRequest request, RequestOptions options,
+    public void estimateMemoryUsageAsync(PutDataFrameAnalyticsRequest request, RequestOptions options,
                                          ActionListener<EstimateMemoryUsageResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+        restHighLevelClient.performRequestAsyncAndParseEntity(
+            request,
             MLRequestConverters::estimateMemoryUsage,
             options,
             EstimateMemoryUsageResponse::fromXContent,
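Taken together, these client hunks mean memory estimation is now driven by the same request type as the put API. A hypothetical caller-side sketch (the `client` handle and index name are assumptions; the builder calls mirror the integration test later in this diff):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ml.EstimateMemoryUsageResponse;
    import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
    import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
    import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
    import org.elasticsearch.client.ml.dataframe.OutlierDetection;
    import java.io.IOException;

    class EstimateMemoryUsageExample {
        static EstimateMemoryUsageResponse estimate(RestHighLevelClient client, String index) throws IOException {
            // Memory estimation now reuses PutDataFrameAnalyticsRequest rather than
            // the deleted EstimateMemoryUsageRequest wrapper.
            PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(
                DataFrameAnalyticsConfig.builder()
                    .setSource(DataFrameAnalyticsSource.builder().setIndex(index).build())
                    .setAnalysis(OutlierDetection.createDefault())
                    .build());
            return client.machineLearning().estimateMemoryUsage(request, RequestOptions.DEFAULT);
        }
    }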
@@ -554,7 +554,8 @@ final class RequestConverters {
             .withRefresh(reindexRequest.isRefresh())
             .withTimeout(reindexRequest.getTimeout())
             .withWaitForActiveShards(reindexRequest.getWaitForActiveShards())
-            .withRequestsPerSecond(reindexRequest.getRequestsPerSecond());
+            .withRequestsPerSecond(reindexRequest.getRequestsPerSecond())
+            .withSlices(reindexRequest.getSlices());
 
         if (reindexRequest.getScrollTime() != null) {
             params.putParam("scroll", reindexRequest.getScrollTime());
@@ -895,6 +896,10 @@ final class RequestConverters {
             return putParam("routing", routing);
         }
 
+        Params withSlices(int slices) {
+            return putParam("slices", String.valueOf(slices));
+        }
+
         Params withStoredFields(String[] storedFields) {
             if (storedFields != null && storedFields.length > 0) {
                 return putParam("stored_fields", String.join(",", storedFields));
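A small sketch of the effect of the two hunks above from the caller's side (index names are illustrative): setting `slices` on a `ReindexRequest` now propagates as a `slices` query parameter on the generated HTTP request.

    import org.elasticsearch.index.reindex.ReindexRequest;

    class ReindexSlicesExample {
        static ReindexRequest slicedReindex() {
            ReindexRequest request = new ReindexRequest();
            request.setSourceIndices("source-index");
            request.setDestIndex("dest-index");
            request.setSlices(5); // serialized by Params.withSlices(int) as ?slices=5
            return request;
        }
    }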
@@ -1,91 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.xcontent.ConstructingObjectParser;
-import org.elasticsearch.common.xcontent.ToXContentObject;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
-
-public class EstimateMemoryUsageRequest implements ToXContentObject, Validatable {
-
-    private static final ParseField DATA_FRAME_ANALYTICS_CONFIG = new ParseField("data_frame_analytics_config");
-
-    private static final ConstructingObjectParser<EstimateMemoryUsageRequest, Void> PARSER =
-        new ConstructingObjectParser<>(
-            "estimate_memory_usage_request",
-            true,
-            args -> {
-                DataFrameAnalyticsConfig config = (DataFrameAnalyticsConfig) args[0];
-                return new EstimateMemoryUsageRequest(config);
-            });
-
-    static {
-        PARSER.declareObject(constructorArg(), (p, c) -> DataFrameAnalyticsConfig.fromXContent(p), DATA_FRAME_ANALYTICS_CONFIG);
-    }
-
-    public static EstimateMemoryUsageRequest fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final DataFrameAnalyticsConfig config;
-
-    public EstimateMemoryUsageRequest(DataFrameAnalyticsConfig config) {
-        this.config = Objects.requireNonNull(config);
-    }
-
-    public DataFrameAnalyticsConfig getConfig() {
-        return config;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(DATA_FRAME_ANALYTICS_CONFIG.getPreferredName(), config);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        EstimateMemoryUsageRequest that = (EstimateMemoryUsageRequest) other;
-        return Objects.equals(config, that.config);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(config);
-    }
-}
@@ -35,10 +35,8 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
 
 public class EstimateMemoryUsageResponse implements ToXContentObject {
 
-    public static final ParseField EXPECTED_MEMORY_USAGE_WITH_ONE_PARTITION =
-        new ParseField("expected_memory_usage_with_one_partition");
-    public static final ParseField EXPECTED_MEMORY_USAGE_WITH_MAX_PARTITIONS =
-        new ParseField("expected_memory_usage_with_max_partitions");
+    public static final ParseField EXPECTED_MEMORY_WITHOUT_DISK = new ParseField("expected_memory_without_disk");
+    public static final ParseField EXPECTED_MEMORY_WITH_DISK = new ParseField("expected_memory_with_disk");
 
     static final ConstructingObjectParser<EstimateMemoryUsageResponse, Void> PARSER =
         new ConstructingObjectParser<>(
@@ -49,13 +47,13 @@ public class EstimateMemoryUsageResponse implements ToXContentObject {
     static {
         PARSER.declareField(
             optionalConstructorArg(),
-            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_USAGE_WITH_ONE_PARTITION.getPreferredName()),
-            EXPECTED_MEMORY_USAGE_WITH_ONE_PARTITION,
+            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName()),
+            EXPECTED_MEMORY_WITHOUT_DISK,
             ObjectParser.ValueType.VALUE);
         PARSER.declareField(
             optionalConstructorArg(),
-            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_USAGE_WITH_MAX_PARTITIONS.getPreferredName()),
-            EXPECTED_MEMORY_USAGE_WITH_MAX_PARTITIONS,
+            (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITH_DISK.getPreferredName()),
+            EXPECTED_MEMORY_WITH_DISK,
             ObjectParser.ValueType.VALUE);
     }
 
@@ -63,33 +61,30 @@ public class EstimateMemoryUsageResponse implements ToXContentObject {
         return PARSER.apply(parser, null);
     }
 
-    private final ByteSizeValue expectedMemoryUsageWithOnePartition;
-    private final ByteSizeValue expectedMemoryUsageWithMaxPartitions;
+    private final ByteSizeValue expectedMemoryWithoutDisk;
+    private final ByteSizeValue expectedMemoryWithDisk;
 
-    public EstimateMemoryUsageResponse(@Nullable ByteSizeValue expectedMemoryUsageWithOnePartition,
-                                       @Nullable ByteSizeValue expectedMemoryUsageWithMaxPartitions) {
-        this.expectedMemoryUsageWithOnePartition = expectedMemoryUsageWithOnePartition;
-        this.expectedMemoryUsageWithMaxPartitions = expectedMemoryUsageWithMaxPartitions;
+    public EstimateMemoryUsageResponse(@Nullable ByteSizeValue expectedMemoryWithoutDisk, @Nullable ByteSizeValue expectedMemoryWithDisk) {
+        this.expectedMemoryWithoutDisk = expectedMemoryWithoutDisk;
+        this.expectedMemoryWithDisk = expectedMemoryWithDisk;
     }
 
-    public ByteSizeValue getExpectedMemoryUsageWithOnePartition() {
-        return expectedMemoryUsageWithOnePartition;
+    public ByteSizeValue getExpectedMemoryWithoutDisk() {
+        return expectedMemoryWithoutDisk;
     }
 
-    public ByteSizeValue getExpectedMemoryUsageWithMaxPartitions() {
-        return expectedMemoryUsageWithMaxPartitions;
+    public ByteSizeValue getExpectedMemoryWithDisk() {
+        return expectedMemoryWithDisk;
     }
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        if (expectedMemoryUsageWithOnePartition != null) {
-            builder.field(
-                EXPECTED_MEMORY_USAGE_WITH_ONE_PARTITION.getPreferredName(), expectedMemoryUsageWithOnePartition.getStringRep());
+        if (expectedMemoryWithoutDisk != null) {
+            builder.field(EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName(), expectedMemoryWithoutDisk.getStringRep());
         }
-        if (expectedMemoryUsageWithMaxPartitions != null) {
-            builder.field(
-                EXPECTED_MEMORY_USAGE_WITH_MAX_PARTITIONS.getPreferredName(), expectedMemoryUsageWithMaxPartitions.getStringRep());
+        if (expectedMemoryWithDisk != null) {
+            builder.field(EXPECTED_MEMORY_WITH_DISK.getPreferredName(), expectedMemoryWithDisk.getStringRep());
         }
         builder.endObject();
         return builder;
@@ -105,12 +100,12 @@ public class EstimateMemoryUsageResponse implements ToXContentObject {
         }
 
         EstimateMemoryUsageResponse that = (EstimateMemoryUsageResponse) other;
-        return Objects.equals(expectedMemoryUsageWithOnePartition, that.expectedMemoryUsageWithOnePartition)
-            && Objects.equals(expectedMemoryUsageWithMaxPartitions, that.expectedMemoryUsageWithMaxPartitions);
+        return Objects.equals(expectedMemoryWithoutDisk, that.expectedMemoryWithoutDisk)
+            && Objects.equals(expectedMemoryWithDisk, that.expectedMemoryWithDisk);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(expectedMemoryUsageWithOnePartition, expectedMemoryUsageWithMaxPartitions);
+        return Objects.hash(expectedMemoryWithoutDisk, expectedMemoryWithDisk);
     }
 }
@@ -35,7 +35,6 @@ import org.elasticsearch.client.ml.DeleteFilterRequest;
 import org.elasticsearch.client.ml.DeleteForecastRequest;
 import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
-import org.elasticsearch.client.ml.EstimateMemoryUsageRequest;
 import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
 import org.elasticsearch.client.ml.FindFileStructureRequest;
 import org.elasticsearch.client.ml.FindFileStructureRequestTests;
@@ -797,13 +796,13 @@ public class MLRequestConvertersTests extends ESTestCase {
     }
 
     public void testEstimateMemoryUsage() throws IOException {
-        EstimateMemoryUsageRequest estimateRequest = new EstimateMemoryUsageRequest(randomDataFrameAnalyticsConfig());
+        PutDataFrameAnalyticsRequest estimateRequest = new PutDataFrameAnalyticsRequest(randomDataFrameAnalyticsConfig());
         Request request = MLRequestConverters.estimateMemoryUsage(estimateRequest);
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
         assertEquals("/_ml/data_frame/analytics/_estimate_memory_usage", request.getEndpoint());
         try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
-            EstimateMemoryUsageRequest parsedRequest = EstimateMemoryUsageRequest.fromXContent(parser);
-            assertThat(parsedRequest, equalTo(estimateRequest));
+            DataFrameAnalyticsConfig parsedConfig = DataFrameAnalyticsConfig.fromXContent(parser);
+            assertThat(parsedConfig, equalTo(estimateRequest.getConfig()));
         }
     }
 
@@ -44,7 +44,6 @@ import org.elasticsearch.client.ml.DeleteForecastRequest;
 import org.elasticsearch.client.ml.DeleteJobRequest;
 import org.elasticsearch.client.ml.DeleteJobResponse;
 import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
-import org.elasticsearch.client.ml.EstimateMemoryUsageRequest;
 import org.elasticsearch.client.ml.EstimateMemoryUsageResponse;
 import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
 import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
@@ -1731,8 +1730,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         highLevelClient().bulk(bulk1, RequestOptions.DEFAULT);
 
         MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
-        EstimateMemoryUsageRequest estimateMemoryUsageRequest =
-            new EstimateMemoryUsageRequest(
+        PutDataFrameAnalyticsRequest estimateMemoryUsageRequest =
+            new PutDataFrameAnalyticsRequest(
                 DataFrameAnalyticsConfig.builder()
                     .setSource(DataFrameAnalyticsSource.builder().setIndex(indexName).build())
                     .setAnalysis(OutlierDetection.createDefault())
@@ -1746,8 +1745,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         EstimateMemoryUsageResponse response1 =
             execute(
                 estimateMemoryUsageRequest, machineLearningClient::estimateMemoryUsage, machineLearningClient::estimateMemoryUsageAsync);
-        assertThat(response1.getExpectedMemoryUsageWithOnePartition(), allOf(greaterThan(lowerBound), lessThan(upperBound)));
-        assertThat(response1.getExpectedMemoryUsageWithMaxPartitions(), allOf(greaterThan(lowerBound), lessThan(upperBound)));
+        assertThat(response1.getExpectedMemoryWithoutDisk(), allOf(greaterThan(lowerBound), lessThan(upperBound)));
+        assertThat(response1.getExpectedMemoryWithDisk(), allOf(greaterThan(lowerBound), lessThan(upperBound)));
 
         BulkRequest bulk2 = new BulkRequest()
             .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
|
|||
execute(
|
||||
estimateMemoryUsageRequest, machineLearningClient::estimateMemoryUsage, machineLearningClient::estimateMemoryUsageAsync);
|
||||
assertThat(
|
||||
response2.getExpectedMemoryUsageWithOnePartition(),
|
||||
allOf(greaterThan(response1.getExpectedMemoryUsageWithOnePartition()), lessThan(upperBound)));
|
||||
assertThat(
|
||||
response2.getExpectedMemoryUsageWithMaxPartitions(),
|
||||
allOf(greaterThan(response1.getExpectedMemoryUsageWithMaxPartitions()), lessThan(upperBound)));
|
||||
response2.getExpectedMemoryWithoutDisk(), allOf(greaterThan(response1.getExpectedMemoryWithoutDisk()), lessThan(upperBound)));
|
||||
assertThat(response2.getExpectedMemoryWithDisk(), allOf(greaterThan(response1.getExpectedMemoryWithDisk()), lessThan(upperBound)));
|
||||
}
|
||||
|
||||
public void testPutFilter() throws Exception {
|
||||
|
|
|
@@ -452,6 +452,13 @@ public class RequestConvertersTests extends ESTestCase {
         if (reindexRequest.getRemoteInfo() == null && randomBoolean()) {
             reindexRequest.setSourceQuery(new TermQueryBuilder("foo", "fooval"));
         }
+        if (randomBoolean()) {
+            int slices = randomInt(100);
+            reindexRequest.setSlices(slices);
+            expectedParams.put("slices", String.valueOf(slices));
+        } else {
+            expectedParams.put("slices", "1");
+        }
         setRandomTimeout(reindexRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams);
         setRandomWaitForActiveShards(reindexRequest::setWaitForActiveShards, ActiveShardCount.DEFAULT, expectedParams);
         expectedParams.put("scroll", reindexRequest.getScrollTime().getStringRep());
@@ -95,6 +95,11 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
     private static final String snapshotName = "test_snapshot";
     private static final String indexName = "test_index";
 
+    @Override
+    protected boolean waitForAllSnapshotsWiped() {
+        return true;
+    }
+
     public void testSnapshotCreateRepository() throws IOException {
         RestHighLevelClient client = highLevelClient();
 
@@ -1,68 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigTests;
-import org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.NamedXContentRegistry;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.SearchModule;
-import org.elasticsearch.test.AbstractXContentTestCase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.function.Predicate;
-
-public class EstimateMemoryUsageRequestTests extends AbstractXContentTestCase<EstimateMemoryUsageRequest> {
-
-    public static EstimateMemoryUsageRequest randomRequest() {
-        return new EstimateMemoryUsageRequest(DataFrameAnalyticsConfigTests.randomDataFrameAnalyticsConfig());
-    }
-
-    @Override
-    protected EstimateMemoryUsageRequest createTestInstance() {
-        return randomRequest();
-    }
-
-    @Override
-    protected EstimateMemoryUsageRequest doParseInstance(XContentParser parser) throws IOException {
-        return EstimateMemoryUsageRequest.fromXContent(parser);
-    }
-
-    @Override
-    protected boolean supportsUnknownFields() {
-        return true;
-    }
-
-    @Override
-    protected Predicate<String> getRandomFieldsExcludeFilter() {
-        return field -> field.contains(".");
-    }
-
-    @Override
-    protected NamedXContentRegistry xContentRegistry() {
-        List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
-        namedXContent.addAll(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
-        namedXContent.addAll(new MlDataFrameAnalysisNamedXContentProvider().getNamedXContentParsers());
-        return new NamedXContentRegistry(namedXContent);
-    }
-}
@@ -21,6 +21,7 @@ import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.LoggedExec
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.BwcVersions
+import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
 
 import java.nio.charset.StandardCharsets
 
@@ -42,7 +43,7 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
 
 File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}")
 
-final String remote = System.getProperty("tests.bwc.remote", "elastic")
+final String remote = System.getProperty("bwc.remote", "elastic")
 
 boolean gitFetchLatest
 final String gitFetchLatestProperty = System.getProperty("tests.bwc.git_fetch_latest", "true")
@@ -103,8 +104,8 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
 task checkoutBwcBranch() {
     dependsOn fetchLatest
     doLast {
-        String refspec = System.getProperty("tests.bwc.refspec.${bwcBranch}", "${remote}/${bwcBranch}")
-        if (System.getProperty("tests.bwc.checkout.align") != null) {
+        String refspec = System.getProperty("bwc.refspec.${bwcBranch}") ?: System.getProperty("tests.bwc.refspec.${bwcBranch}") ?: "${remote}/${bwcBranch}"
+        if (System.getProperty("bwc.checkout.align") != null || System.getProperty("tests.bwc.checkout.align") != null) {
             /*
              We use a time based approach to make the bwc versions built deterministic and compatible with the current hash.
              Most of the time we want to test against latest, but when running delayed exhaustive tests or wanting
@@ -145,6 +146,7 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
             spec.workingDir = checkoutDir
             spec.commandLine "git", "checkout", refspec
         }
+        file("${project.buildDir}/refspec").text = GlobalBuildInfoPlugin.gitRevision(checkoutDir)
     }
 }
 
@@ -217,6 +219,9 @@ bwcVersions.forPreviousUnreleased { BwcVersions.UnreleasedVersionInfo unreleased
 
 Closure createBuildBwcTask = { projectName, projectDir, projectArtifact ->
     Task bwcTask = createRunBwcGradleTask(buildBwcTaskName(projectName)) {
+        inputs.file("${project.buildDir}/refspec")
+        outputs.files(projectArtifact)
+        outputs.cacheIf { true }
         args ":${projectDir.replace('/', ':')}:assemble"
         doLast {
             if (projectArtifact.exists() == false) {
@@ -59,6 +59,9 @@ testClusters.integTest {
   extraConfigFile 'hunspell/en_US/en_US.dic', project(":server").file('src/test/resources/indices/analyze/conf_dir/hunspell/en_US/en_US.dic')
   // Whitelist reindexing from the local node so we can test it.
   setting 'reindex.remote.whitelist', '127.0.0.1:*'
+
+  // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
+  systemProperty 'es.transport.cname_in_publish_address', 'true'
 }
 
 // build the cluster with all plugins
@@ -48,7 +48,7 @@ PUT /seats
     "mappings": {
         "properties": {
             "theatre": { "type": "keyword" },
-            "play": { "type": "text" },
+            "play": { "type": "keyword" },
             "actors": { "type": "text" },
             "row": { "type": "integer" },
             "number": { "type": "integer" },
@@ -72,7 +72,7 @@ seat data is indexed.
 +
 [source,js]
 ----
-curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/<local-file-path>/seats.json"
+curl -XPOST "localhost:9200/seats/_bulk?pipeline=seats" -H "Content-Type: application/x-ndjson" --data-binary "@/<local-file-path>/seats.json"
 ----
 // NOTCONSOLE
 
@@ -43,7 +43,7 @@ all available theatre seats for evening performances that are under $18.
 
 [source,js]
 ----
-GET evening/_search
+GET seats/_search
 {
     "query": {
         "bool" : {
@@ -18,7 +18,7 @@ The standard <<painless-api-reference, Painless API>> is available.
 
 *Example*
 
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -65,6 +65,8 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
 
 <1> The Java Stream API is used in the condition. This API allows manipulation of
 the elements of the list in a pipeline.
@@ -76,7 +78,7 @@ on the value of the seats sold for the plays in the data set. The script aggrega
 the total sold seats for each play and returns true if there is at least one play
 that has sold over $50,000.
 
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -121,6 +123,8 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
 
 This example uses a nearly identical condition as the previous example. The
 differences below are subtle and are worth calling out.
@@ -1,4 +1,4 @@
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -99,10 +99,12 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
 
 The following example shows the use of metadata and transforming dates into a readable format.
 
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -155,3 +157,5 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
@@ -18,7 +18,7 @@ The standard <<painless-api-reference, Painless API>> is available.
 
 *Example*
 
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -75,6 +75,8 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
 
 <1> The Java Stream API is used in the transform. This API allows manipulation of
 the elements of the list in a pipeline.
@@ -86,7 +88,7 @@ the elements of the list in a pipeline.
 The following action transform changes each value in the mod_log action into a `String`.
 This transform does not change the values in the unmod_log action.
 
-[source,Painless]
+[source,js]
 ----
 POST _watcher/watch/_execute
 {
@@ -140,6 +142,8 @@ POST _watcher/watch/_execute
   }
 }
 ----
+// CONSOLE
+// TEST[skip: requires setup from other pages]
 
 This example uses the streaming API in a very similar manner. The differences below are
 subtle and worth calling out.
@@ -143,7 +143,7 @@ the first bucket you'll get will be the one with `100` as its key. This is confu
 to get those buckets between `0 - 100`.
 
 With `extended_bounds` setting, you now can "force" the histogram aggregation to start building buckets on a specific
-`min` values and also keep on building buckets up to a `max` value (even if there are no documents anymore). Using
+`min` value and also keep on building buckets up to a `max` value (even if there are no documents anymore). Using
 `extended_bounds` only makes sense when `min_doc_count` is 0 (the empty buckets will never be returned if `min_doc_count`
 is greater than 0).
 
@@ -185,8 +185,10 @@ the `order` setting. Supports the same `order` functionality as the <<search-agg
 
 ==== Offset
 
-By default the bucket keys start with 0 and then continue in even spaced steps of `interval`, e.g. if the interval is 10 the first buckets
-(assuming there is data inside them) will be `[0, 10)`, `[10, 20)`, `[20, 30)`. The bucket boundaries can be shifted by using the `offset` option.
+By default the bucket keys start with 0 and then continue in even spaced steps
+of `interval`, e.g. if the interval is `10`, the first three buckets (assuming
+there is data inside them) will be `[0, 10)`, `[10, 20)`, `[20, 30)`. The bucket
+boundaries can be shifted by using the `offset` option.
 
 This can be best illustrated with an example. If there are 10 documents with values ranging from 5 to 14, using interval `10` will result in
 two buckets with 5 documents each. If an additional offset `5` is used, there will be only one single bucket `[5, 15)` containing all the 10
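To make the offset arithmetic in the documentation change above concrete, a tiny hedged sketch (the bucketing formula is inferred from the prose, not taken from Elasticsearch source): with interval 10 and offset 5, values 5 through 14 all land in the single bucket `[5, 15)`.

    class HistogramOffsetExample {
        // Assumed rule implied by the docs: keys start at `offset` and step by `interval`.
        static long bucketKey(double value, long interval, long offset) {
            return offset + (long) Math.floor((value - offset) / interval) * interval;
        }

        public static void main(String[] args) {
            System.out.println(bucketKey(5, 10, 5));  // 5  -> bucket [5, 15)
            System.out.println(bucketKey(14, 10, 5)); // 5  -> same bucket
            System.out.println(bucketKey(4, 10, 5));  // -5 -> bucket [-5, 5)
        }
    }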
@@ -1,28 +1,75 @@
 [[cluster-allocation-explain]]
 === Cluster Allocation Explain API
 
+Provides explanations for shard allocations in the cluster.
+
+
+[[cluster-allocation-explain-api-request]]
+==== {api-request-title}
+
+`GET /_cluster/allocation/explain`
+
+
+[[cluster-allocation-explain-api-desc]]
+==== {api-description-title}
+
 The purpose of the cluster allocation explain API is to provide
 explanations for shard allocations in the cluster. For unassigned shards,
 the explain API provides an explanation for why the shard is unassigned.
 For assigned shards, the explain API provides an explanation for why the
 shard is remaining on its current node and has not moved or rebalanced to
-another node. This API can be very useful when attempting to diagnose why
-a shard is unassigned or why a shard continues to remain on its current node
-when you might expect otherwise.
+another node. This API can be very useful when attempting to diagnose why a
+shard is unassigned or why a shard continues to remain on its current node when
+you might expect otherwise.
 
-[float]
-==== Explain API Request
+[[cluster-allocation-explain-api-query-params]]
+==== {api-query-parms-title}
 
-To explain the allocation of a shard, first an index should exist:
+`include_disk_info`::
+    (Optional, boolean) If `true`, returns information about disk usage and
+    shard sizes. Defaults to `false`.
 
-[source,js]
---------------------------------------------------
-PUT /myindex
---------------------------------------------------
-// CONSOLE
+`include_yes_decisions`::
+    (Optional, boolean) If `true`, returns 'YES' decisions in explanation.
+    Defaults to `false`.
+
+
+[[cluster-allocation-explain-api-request-body]]
+==== {api-request-body-title}
+
+`current_node`::
+    (Optional, string) Specifies the node ID or the name of the node to only
+    explain a shard that is currently located on the specified node.
+
+`index`::
+    (Optional, string) Specifies the name of the index that you would like an
+    explanation for.
+
+`primary`::
+    (Optional, boolean) If `true`, returns explanation for the primary shard
+    for the given shard ID.
+
+`shard`::
+    (Optional, integer) Specifies the ID of the shard that you would like an
+    explanation for.
+
+You can also have {es} explain the allocation of the first unassigned shard that
+it finds by sending an empty body for the request.
+
+
+[[cluster-allocation-explain-api-examples]]
+==== {api-examples-title}
+
+
+//////
+[source,js]
+--------------------------------------------------
+PUT /myindex
+--------------------------------------------------
+// CONSOLE
+// TESTSETUP
 
-And then the allocation for shards of that index can be explained:
+//////
 
 [source,js]
 --------------------------------------------------
@@ -35,14 +82,8 @@ GET /_cluster/allocation/explain
 --------------------------------------------------
 // CONSOLE
 
-Specify the `index` and `shard` id of the shard you would like an explanation
-for, as well as the `primary` flag to indicate whether to explain the primary
-shard for the given shard id or one of its replica shards. These three request
-parameters are required.
 
-You may also specify an optional `current_node` request parameter to only explain
-a shard that is currently located on `current_node`. The `current_node` can be
-specified as either the node id or node name.
+===== Example of the current_node parameter
 
 [source,js]
 --------------------------------------------------
@@ -58,22 +99,8 @@ GET /_cluster/allocation/explain
 // TEST[skip:no way of knowing the current_node]
 <1> The node where shard 0 currently has a replica on
 
-You can also have Elasticsearch explain the allocation of the first unassigned
-shard that it finds by sending an empty body for the request:
-
-[source,js]
---------------------------------------------------
-GET /_cluster/allocation/explain
---------------------------------------------------
-// CONSOLE
-
-[float]
-==== Explain API Response
-
-This section includes examples of the cluster allocation explain API response output
-under various scenarios.
-
 //////////////////////////
+===== Examples of unassigned primary shard explanations
 
 [source,js]
 --------------------------------------------------
@@ -89,9 +116,8 @@ GET /_cluster/allocation/explain
 --------------------------------------------------
 // CONSOLE
 
 //////////////////////////
-
-The API response for an unassigned shard:
+The API returns the following response for an unassigned primary shard:
 
 [source,js]
 --------------------------------------------------
@@ -131,36 +157,13 @@ The API response for an unassigned shard:
 // TESTRESPONSE[s/"transport_address" : "[^"]*"/"transport_address" : $body.$_path/]
 // TESTRESPONSE[s/"node_attributes" : \{\}/"node_attributes" : $body.$_path/]
 
-<1> The current state of the shard
-<2> The reason for the shard originally becoming unassigned
-<3> Whether to allocate the shard
-<4> Whether to allocate the shard to the particular node
-<5> The decider which led to the `no` decision for the node
-<6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision
+<1> The current state of the shard.
+<2> The reason for the shard originally becoming unassigned.
+<3> Whether to allocate the shard.
+<4> Whether to allocate the shard to the particular node.
+<5> The decider which led to the `no` decision for the node.
+<6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision.
 
-You can return information gathered by the cluster info service about disk usage
-and shard sizes by setting the `include_disk_info` parameter to `true`:
-
-[source,js]
---------------------------------------------------
-GET /_cluster/allocation/explain?include_disk_info=true
---------------------------------------------------
-// CONSOLE
-
-Additionally, if you would like to include all decisions that were factored into the final
-decision, the `include_yes_decisions` parameter will return all decisions for each node:
-
-[source,js]
---------------------------------------------------
-GET /_cluster/allocation/explain?include_yes_decisions=true
---------------------------------------------------
-// CONSOLE
-
-The default value for `include_yes_decisions` is `false`, which will only
-include the `no` decisions in the response. This is generally what you would
-want, as the `no` decisions indicate why a shard is unassigned or cannot be moved,
-and including all decisions include the `yes` ones adds a lot of verbosity to the
-API's response output.
-
 The API response output for an unassigned primary shard that had previously been
 allocated to a node in the cluster:
@@ -184,7 +187,11 @@ allocated to a node in the cluster:
 --------------------------------------------------
 // NOTCONSOLE
 
-The API response output for a replica that is unassigned due to delayed allocation:
+
+===== Example of an unassigned replica shard explanation
+
+The API response output for a replica that is unassigned due to delayed
+allocation:
 
 [source,js]
 --------------------------------------------------
@ -233,12 +240,15 @@ The API response output for a replica that is unassigned due to delayed allocati
|
|||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
<1> The configured delay before allocating a replica shard that does not exist due to the node holding it leaving the cluster
|
||||
<2> The remaining delay before allocating the replica shard
|
||||
<3> Information about the shard data found on a node
|
||||
<1> The configured delay before allocating a replica shard that does not exist due to the node holding it leaving the cluster.
|
||||
<2> The remaining delay before allocating the replica shard.
|
||||
<3> Information about the shard data found on a node.
|
||||
|
||||
The API response output for an assigned shard that is not allowed to
|
||||
remain on its current node and is required to move:
|
||||
|
||||
===== Examples of allocated shard explanations
|
||||
|
||||
The API response output for an assigned shard that is not allowed to remain on
|
||||
its current node and is required to move:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -281,9 +291,10 @@ remain on its current node and is required to move:
|
|||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
<1> Whether the shard is allowed to remain on its current node
|
||||
<2> The deciders that factored into the decision of why the shard is not allowed to remain on its current node
|
||||
<3> Whether the shard is allowed to be allocated to another node
|
||||
<1> Whether the shard is allowed to remain on its current node.
|
||||
<2> The deciders that factored into the decision of why the shard is not allowed to remain on its current node.
|
||||
<3> Whether the shard is allowed to be allocated to another node.
|
||||
|
||||
|
||||
The API response output for an assigned shard that remains on its current node
|
||||
because moving the shard to another node does not form a better cluster balance:
|
||||
|
@ -317,6 +328,6 @@ because moving the shard to another node does not form a better cluster balance:
|
|||
}
|
||||
--------------------------------------------------
|
||||
// NOTCONSOLE
|
||||
<1> Whether rebalancing is allowed on the cluster
|
||||
<2> Whether the shard can be rebalanced to another node
|
||||
<3> The reason the shard cannot be rebalanced to the node, in this case indicating that it offers no better balance than the current node
|
||||
<1> Whether rebalancing is allowed on the cluster.
|
||||
<2> Whether the shard can be rebalanced to another node.
|
||||
<3> The reason the shard cannot be rebalanced to the node, in this case indicating that it offers no better balance than the current node.
|
||||
|
|
|
@ -1,29 +1,65 @@
|
|||
[[cluster-nodes-hot-threads]]
|
||||
=== Nodes hot_threads
|
||||
|
||||
Returns the hot threads on each selected node in the cluster.
|
||||
|
||||
|
||||
[[cluster-nodes-hot-threads-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`GET /_nodes/hot_threads` +
|
||||
|
||||
`GET /_nodes/{node_id}/hot_threads`
|
||||
|
||||
|
||||
[[cluster-nodes-hot-threads-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
This API yields a breakdown of the hot threads on each selected node in the
|
||||
cluster. Its endpoints are `/_nodes/hot_threads` and
|
||||
`/_nodes/{nodes}/hot_threads`:
|
||||
cluster. The output is plain text with a breakdown of each node's top hot
|
||||
threads.
|
||||
|
||||
|
||||
[[cluster-nodes-hot-threads-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
include::{docdir}/rest-api/common-parms.asciidoc[tag=node-id]
|
||||
|
||||
|
||||
[[cluster-nodes-hot-threads-api-query-params]]
|
||||
==== {api-query-parms-title}
|
||||
|
||||
|
||||
`ignore_idle_threads`::
|
||||
(Optional, boolean) If `true`, known idle threads (e.g. waiting in a socket
|
||||
select, or to get a task from an empty queue) are filtered out. Defaults to
|
||||
`true`.
|
||||
|
||||
`interval`::
|
||||
(Optional, <<time-units, time units>>) The interval to wait before taking the
|
||||
second sample of threads. Defaults to `500ms`.
|
||||
|
||||
`snapshots`::
|
||||
(Optional, integer) Number of thread stack trace samples to take. Defaults to
|
||||
`10`.
|
||||
|
||||
`threads`::
|
||||
(Optional, integer) Specifies the number of hot threads to provide
|
||||
information for. Defaults to `3`.
|
||||
|
||||
include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
|
||||
|
||||
`type`::
|
||||
(Optional, string) The type to sample. Available options are `block`, `cpu`, and
|
||||
`wait`. Defaults to `cpu`.
|
||||
|
||||
|
||||
[[cluster-nodes-hot-threads-api-example]]
|
||||
==== {api-examples-title}
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET /_nodes/hot_threads
|
||||
GET /_nodes/nodeId1,nodeId2/hot_threads
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
The first command gets the hot threads of all the nodes in the cluster. The
|
||||
second command gets the hot threads of only `nodeId1` and `nodeId2`. Nodes can
|
||||
be selected using <<cluster-nodes,node filters>>.
|
||||
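The query parameters described above can be combined. For example, the
following request (the values are illustrative) samples the top five threads in
the `wait` state, waiting one second between samples:

[source,js]
--------------------------------------------------
GET /_nodes/hot_threads?threads=5&type=wait&interval=1s
--------------------------------------------------
// CONSOLE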
|
||||
The output is plain text with a breakdown of each node's top hot threads. The
|
||||
allowed parameters are:
|
||||
|
||||
[horizontal]
|
||||
`threads`:: number of hot threads to provide, defaults to 3.
|
||||
`interval`:: the interval to do the second sampling of threads.
|
||||
Defaults to 500ms.
|
||||
`type`:: The type to sample, defaults to cpu, but supports wait and
|
||||
block to see hot threads that are in wait or block state.
|
||||
`ignore_idle_threads`:: If true, known idle threads (e.g. waiting in a socket select, or to
|
||||
get a task from an empty queue) are filtered out. Defaults to true.
|
||||
// CONSOLE
|
|
@ -3,12 +3,52 @@
|
|||
|
||||
beta[The Task Management API is new and should still be considered a beta feature. The API may change in ways that are not backwards compatible]
|
||||
|
||||
[float]
|
||||
==== Current Tasks Information
|
||||
Returns information about the tasks currently executing in the cluster.
|
||||
|
||||
[[tasks-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`GET /_tasks` +
|
||||
|
||||
`GET /_tasks/{task_id}`
|
||||
|
||||
|
||||
[[tasks-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
The task management API allows you to retrieve information about the tasks currently
|
||||
executing on one or more nodes in the cluster.
|
||||
|
||||
|
||||
[[tasks-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`{task_id}`::
|
||||
(Optional, string) The ID of the task to return (`node_id:task_number`).
|
||||
|
||||
|
||||
[[tasks-api-query-params]]
|
||||
==== {api-query-parms-title}
|
||||
|
||||
include::{docdir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
|
||||
|
||||
`wait_for_completion`::
|
||||
(Optional, boolean) If `true`, the request waits for the matching tasks to complete.
|
||||
Defaults to `false`.
|
||||
|
||||
|
||||
|
||||
[[tasks-api-response-codes]]
|
||||
==== {api-response-codes-title}
|
||||
|
||||
`404` (Missing resources)::
|
||||
If `{task_id}` is specified but not found, this code indicates that there
|
||||
are no resources that match the request.
|
||||
|
||||
|
||||
[[tasks-api-examples]]
|
||||
==== {api-examples-title}
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _tasks <1>
|
||||
|
@ -22,7 +62,7 @@ GET _tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3>
|
|||
<2> Retrieves all tasks running on nodes `nodeId1` and `nodeId2`. See <<cluster-nodes>> for more info about how to select individual nodes.
|
||||
<3> Retrieves all cluster-related tasks running on nodes `nodeId1` and `nodeId2`.
|
||||
|
||||
The result will look similar to the following:
|
||||
The API returns the following result:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -60,6 +100,8 @@ The result will look similar to the following:
|
|||
--------------------------------------------------
|
||||
// TESTRESPONSE
|
||||
|
||||
===== Retrieve information from a particular task
|
||||
|
||||
It is also possible to retrieve information for a particular task. The following
|
||||
example retrieves information about task `oTUltX4IQMOUUVeiohTt8A:124`:
|
||||
|
||||
|
@ -82,6 +124,9 @@ GET _tasks?parent_task_id=oTUltX4IQMOUUVeiohTt8A:123
|
|||
|
||||
If the parent isn't found, the API does not return a 404.
|
||||
|
||||
|
||||
===== Get more information about tasks
|
||||
|
||||
You can also use the `detailed` request parameter to get more information about
|
||||
the running tasks. This is useful for telling one task from another but is more
|
||||
costly to execute. For example, fetching all searches using the `detailed`
|
||||
|
@ -94,7 +139,7 @@ GET _tasks?actions=*search&detailed
|
|||
// CONSOLE
|
||||
// TEST[skip:No tasks to retrieve]
|
||||
|
||||
The results might look like:
|
||||
The API returns the following result:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -145,6 +190,9 @@ releases.
|
|||
|
||||
==============================
|
||||
|
||||
|
||||
===== Wait for completion
|
||||
|
||||
The task API can also be used to wait for completion of a particular task. The
|
||||
following call will block for 10 seconds or until the task with id
|
||||
`oTUltX4IQMOUUVeiohTt8A:12345` is completed.
|
||||
|
@ -156,8 +204,8 @@ GET _tasks/oTUltX4IQMOUUVeiohTt8A:12345?wait_for_completion=true&timeout=10s
|
|||
// CONSOLE
|
||||
// TEST[catch:missing]
|
||||
|
||||
You can also wait for all tasks for certain action types to finish. This
|
||||
command will wait for all `reindex` tasks to finish:
|
||||
You can also wait for all tasks for certain action types to finish. This command
|
||||
will wait for all `reindex` tasks to finish:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -165,6 +213,9 @@ GET _tasks?actions=*reindex&wait_for_completion=true&timeout=10s
|
|||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
|
||||
===== Listing tasks by using _cat
|
||||
|
||||
Tasks can also be listed using the `_cat` version of the list tasks command, which
|
||||
accepts the same arguments as the standard list tasks command.
|
||||
|
||||
|
@ -175,9 +226,8 @@ GET _cat/tasks?detailed
|
|||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
[float]
|
||||
[[task-cancellation]]
|
||||
==== Task Cancellation
|
||||
===== Task Cancellation
|
||||
|
||||
If a long-running task supports cancellation, it can be cancelled with the cancel
|
||||
tasks API. The following example cancels task `oTUltX4IQMOUUVeiohTt8A:12345`:
|
||||
|
@ -188,8 +238,9 @@ POST _tasks/oTUltX4IQMOUUVeiohTt8A:12345/_cancel
|
|||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
The task cancellation command supports the same task selection parameters as the list tasks command, so multiple tasks
|
||||
can be cancelled at the same time. For example, the following command will cancel all reindex tasks running on the
|
||||
The task cancellation command supports the same task selection parameters as the
|
||||
list tasks command, so multiple tasks can be cancelled at the same time. For
|
||||
example, the following command will cancel all reindex tasks running on the
|
||||
nodes `nodeId1` and `nodeId2`.
|
||||
|
||||
[source,js]
|
||||
|
@ -198,11 +249,11 @@ POST _tasks/_cancel?nodes=nodeId1,nodeId2&actions=*reindex
|
|||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
[float]
|
||||
==== Task Grouping
|
||||
===== Task Grouping
|
||||
|
||||
The task lists returned by task API commands can be grouped either by nodes (default) or by parent tasks using the `group_by` parameter.
|
||||
The following command will change the grouping to parent tasks:
|
||||
The task lists returned by task API commands can be grouped either by nodes
|
||||
(default) or by parent tasks using the `group_by` parameter. The following
|
||||
command will change the grouping to parent tasks:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
@ -218,12 +269,13 @@ GET _tasks?group_by=none
|
|||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
[float]
|
||||
==== Identifying running tasks
|
||||
|
||||
The `X-Opaque-Id` header, when provided on the HTTP request header, is going to be returned as a header in the response as well as
|
||||
in the `headers` field for in the task information. This allows to track certain calls, or associate certain tasks with
|
||||
a the client that started them:
|
||||
===== Identifying running tasks
|
||||
|
||||
The `X-Opaque-Id` header, when provided in the HTTP request, is returned as a
|
||||
header in the response as well as in the `headers` field of the task
|
||||
information. This allows you to track certain calls or associate certain tasks
|
||||
with the client that started them:
|
||||
|
||||
[source,sh]
|
||||
--------------------------------------------------
|
||||
|
@ -231,7 +283,7 @@ curl -i -H "X-Opaque-Id: 123456" "http://localhost:9200/_tasks?group_by=parents"
|
|||
--------------------------------------------------
|
||||
//NOTCONSOLE
|
||||
|
||||
The result will look similar to the following:
|
||||
The API returns the following result:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
|
|
|
@ -7,21 +7,17 @@
|
|||
Adds or removes master-eligible nodes from the
|
||||
<<modules-discovery-voting,voting configuration exclusion list>>.
|
||||
|
||||
[float]
|
||||
==== Request
|
||||
|
||||
`POST _cluster/voting_config_exclusions/<node_name>` +
|
||||
[[voting-config-exclusions-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`POST _cluster/voting_config_exclusions/{node_name}` +
|
||||
|
||||
`DELETE _cluster/voting_config_exclusions`
|
||||
|
||||
[float]
|
||||
==== Path parameters
|
||||
|
||||
`node_name`::
|
||||
A <<cluster-nodes,node filter>> that identifies {es} nodes.
|
||||
|
||||
[float]
|
||||
==== Description
|
||||
[[voting-config-exclusions-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
By default, if there are more than three master-eligible nodes in the cluster
|
||||
and you remove fewer than half of the master-eligible nodes in the cluster at
|
||||
|
@ -44,23 +40,35 @@ master-eligible nodes from a cluster in a short time period. They are not
|
|||
required when removing master-ineligible nodes or fewer than half of the
|
||||
master-eligible nodes.
|
||||
|
||||
The <<modules-discovery-settings,`cluster.max_voting_config_exclusions`
|
||||
setting>> limits the size of the voting configuration exclusion list. The
|
||||
default value is `10`. Since voting configuration exclusions are persistent and
|
||||
limited in number, you must clear the voting config exclusions list once the
|
||||
exclusions are no longer required.
|
||||
|
||||
There is also a
|
||||
<<modules-discovery-settings,`cluster.auto_shrink_voting_configuration` setting>>,
|
||||
which is set to true by default. If it is set to false, you must use this API to
|
||||
maintain the voting configuration.
|
||||
|
||||
For more information, see <<modules-discovery-removing-nodes>>.
|
||||
|
||||
[float]
|
||||
==== Examples
|
||||
|
||||
[[voting-config-exclusions-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`{node_name}`::
|
||||
A <<cluster-nodes,node filter>> that identifies {es} nodes.
|
||||
|
||||
|
||||
[[voting-config-exclusions-api-query-params]]
|
||||
==== {api-query-parms-title}
|
||||
|
||||
`cluster.auto_shrink_voting_configuration`::
|
||||
(Optional, boolean) If `false`, you must use this API to maintain the voting
|
||||
configuration. Defaults to `true`.
|
||||
|
||||
`cluster.max_voting_config_exclusions`::
|
||||
(Optional, integer) Limits the size of the voting configuration exclusion
|
||||
list. The default value is `10`. Since voting configuration exclusions are
|
||||
persistent and limited in number, you must clear the voting config
|
||||
exclusions list once the exclusions are no longer required.
|
||||
|
||||
|
||||
[[voting-config-exclusions-api-example]]
|
||||
==== {api-examples-title}
|
||||
|
||||
Add `nodeId1` to the voting configuration exclusions list:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST /_cluster/voting_config_exclusions/nodeId1
|
||||
|
@ -68,7 +76,9 @@ POST /_cluster/voting_config_exclusions/nodeId1
|
|||
// CONSOLE
|
||||
// TEST[catch:bad_request]
|
||||
|
||||
|
||||
Remove all exclusions from the list:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
DELETE /_cluster/voting_config_exclusions
|
||||
|
|
|
@ -87,7 +87,7 @@ tar -xvf elasticsearch-{version}-darwin-x86_64.tar.gz
|
|||
+
|
||||
Windows PowerShell:
|
||||
+
|
||||
["source","sh",subs="attributes,callouts"]
|
||||
["source","powershell",subs="attributes,callouts"]
|
||||
--------------------------------------------------
|
||||
Expand-Archive elasticsearch-{version}-windows-x86_64.zip
|
||||
--------------------------------------------------
|
||||
|
@ -104,10 +104,10 @@ cd elasticsearch-{version}/bin
|
|||
+
|
||||
Windows:
|
||||
+
|
||||
["source","sh",subs="attributes,callouts"]
|
||||
["source","powershell",subs="attributes,callouts"]
|
||||
--------------------------------------------------
|
||||
cd %PROGRAMFILES%\Elastic\Elasticsearch\bin
|
||||
.\elasticsearch.exe
|
||||
cd elasticsearch-{version}\bin
|
||||
.\elasticsearch.bat
|
||||
--------------------------------------------------
|
||||
+
|
||||
You now have a single-node {es} cluster up and running!
|
||||
|
@ -126,10 +126,10 @@ Linux and macOS:
|
|||
+
|
||||
Windows:
|
||||
+
|
||||
["source","sh",subs="attributes,callouts"]
|
||||
["source","powershell",subs="attributes,callouts"]
|
||||
--------------------------------------------------
|
||||
.\elasticsearch.exe -Epath.data=data2 -Epath.logs=log2
|
||||
.\elasticsearch.exe -Epath.data=data3 -Epath.logs=log3
|
||||
.\elasticsearch.bat -E path.data=data2 -E path.logs=log2
|
||||
.\elasticsearch.bat -E path.data=data3 -E path.logs=log3
|
||||
--------------------------------------------------
|
||||
+
|
||||
The additional nodes are assigned unique IDs. Because you're running all three
|
||||
|
@ -185,7 +185,7 @@ packages on Linux, install using Homebrew on macOS, or install using the MSI
|
|||
package installer on Windows. See <<install-elasticsearch>> for more information.
|
||||
|
||||
[[getting-started-index]]
|
||||
=== Index some documents
|
||||
== Index some documents
|
||||
|
||||
Once you have a cluster up and running, you're ready to index some data.
|
||||
There are a variety of ingest options for {es}, but in the end they all
|
||||
|
@ -267,7 +267,7 @@ and shows the original source fields that were indexed.
|
|||
|
||||
[float]
|
||||
[[getting-started-batch-processing]]
|
||||
==== Batch processing
|
||||
=== Batch processing
|
||||
|
||||
In addition to being able to index, update, and delete individual documents, Elasticsearch also provides the ability to perform any of the above operations in batches using the {ref}/docs-bulk.html[`_bulk` API]. This is important because it provides a very efficient mechanism for performing multiple operations with as few network roundtrips as possible.
|
||||
|
||||
|
@ -300,7 +300,7 @@ Note above that for the delete action, there is no corresponding source document
|
|||
The Bulk API does not fail due to failures in one of the actions. If a single action fails for whatever reason, the API continues to process the remainder of the actions after it. When the bulk API returns, it provides a status for each action (in the same order the actions were sent in) so that you can check whether a specific action failed or not.
|
||||
|
||||
[float]
|
||||
==== Sample dataset
|
||||
=== Sample dataset
|
||||
|
||||
Now that we've gotten a glimpse of the basics, let's try to work on a more realistic dataset. I've prepared a sample of fictitious JSON documents of customer bank account information. Each document has the following schema:
|
||||
|
||||
|
@ -358,7 +358,7 @@ yellow open bank l7sSYV2cQXmu6_4rJWVIww 5 1 1000 0 12
|
|||
This means that we just successfully bulk indexed 1000 documents into the bank index.
|
||||
|
||||
[[getting-started-search]]
|
||||
=== Start searching
|
||||
== Start searching
|
||||
|
||||
Now let's start with some simple searches. There are two basic ways to run searches: one is by sending search parameters through the {ref}/search-uri-request.html[REST request URI] and the other by sending them through the {ref}/search-request-body.html[REST request body]. The request body method allows you to be more expressive and also to define your searches in a more readable JSON format. We'll try one example of the request URI method but for the remainder of this tutorial, we will exclusively be using the request body method.
|
||||
|
||||
|
@ -501,7 +501,7 @@ It is important to understand that once you get your search results back, Elasti
|
|||
|
||||
[float]
|
||||
[[getting-started-query-lang]]
|
||||
==== Introducing the Query Language
|
||||
=== Introducing the Query Language
|
||||
|
||||
Elasticsearch provides a JSON-style domain-specific language that you can use to execute queries. This is referred to as the {ref}/query-dsl.html[Query DSL]. The query language is quite comprehensive and can be intimidating at first glance but the best way to actually learn it is to start with a few basic examples.
|
||||
|
||||
|
@ -724,7 +724,7 @@ GET /bank/_search
|
|||
|
||||
[float]
|
||||
[[getting-started-filters]]
|
||||
==== Executing filters
|
||||
=== Executing filters
|
||||
|
||||
In the previous section, we skipped over a little detail called the document score (`_score` field in the search results). The score is a numeric value that is a relative measure of how well the document matches the search query that we specified. The higher the score, the more relevant the document is; the lower the score, the less relevant the document is.
|
||||
|
||||
|
@ -761,7 +761,7 @@ Dissecting the above, the bool query contains a `match_all` query (the query par
|
|||
In addition to the `match_all`, `match`, `bool`, and `range` queries, there are a lot of other query types available that we won't go into here. Since we already have a basic understanding of how they work, it shouldn't be too difficult to apply this knowledge when learning and experimenting with the other query types.
|
||||
|
||||
[[getting-started-aggregations]]
|
||||
=== Analyze results with aggregations
|
||||
== Analyze results with aggregations
|
||||
|
||||
Aggregations provide the ability to group and extract statistics from your data. The easiest way to think about aggregations is by roughly equating them to the SQL GROUP BY clause and the SQL aggregate functions. In Elasticsearch, you can execute searches that return hits and, in the same response, aggregated results that are separate from those hits. This is very powerful and efficient: you can run queries and multiple aggregations and get the results of both (or either) operations back in one shot, avoiding network roundtrips, using a concise and simplified API.
|
||||
|
||||
|
@ -965,7 +965,7 @@ GET /bank/_search
|
|||
There are many other aggregation capabilities that we won't go into in detail here. The {ref}/search-aggregations.html[aggregations reference guide] is a great starting point if you want to do further experimentation.
|
||||
|
||||
[[getting-started-next-steps]]
|
||||
=== Where to go from here
|
||||
== Where to go from here
|
||||
|
||||
Now that you've set up a cluster, indexed some documents, and run some
|
||||
searches and aggregations, you might want to:
|
||||
|
|
|
@ -66,8 +66,11 @@ index is rolled over, then `min_age` is the time elapsed from the time the index
|
|||
is rolled over. The intention here is to execute the following phases and actions
|
||||
relative to when data was last written to a rolled-over index.
|
||||
|
||||
The previous phase's actions must complete before {ilm} will check `min_age`
|
||||
and transition into the next phase.
|
||||
The previous phase's actions must complete before {ilm} will check `min_age` and
|
||||
transition into the next phase. By default, {ilm} checks for indices that meet
|
||||
policy criteria, like `min_age`, every 10 minutes. You can use the
|
||||
`indices.lifecycle.poll_interval` cluster setting to control how often this
|
||||
check occurs.
|
||||
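For example, the following request lowers the poll interval to one minute. This
is a sketch; the `1m` value is illustrative and mainly useful when testing
policies, since it makes {ilm} check more frequently:

[source,js]
--------------------------------------------------
PUT /_cluster/settings
{
  "transient": {
    "indices.lifecycle.poll_interval": "1m"
  }
}
--------------------------------------------------
// CONSOLE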
|
||||
=== Phase Execution
|
||||
|
||||
|
|
|
@ -57,12 +57,23 @@ PUT /twitter-1,twitter-2/_mapping <1>
|
|||
[float]
|
||||
==== Updating field mappings
|
||||
|
||||
In general, the mapping for existing fields cannot be updated. There are some
|
||||
exceptions to this rule. For instance:
|
||||
// tag::put-field-mapping-exceptions[]
|
||||
|
||||
* new <<properties>> can be added to <<object>> fields.
|
||||
* new <<multi-fields,multi-fields>> can be added to existing fields.
|
||||
* the <<ignore-above>> parameter can be updated.
|
||||
You can't change the mapping of an existing field, with the following
|
||||
exceptions:
|
||||
|
||||
* You can add new <<properties,properties>> to an <<object,`object`>> field.
|
||||
* You can use the <<multi-fields,`fields`>> mapping parameter to enable
|
||||
multi-fields.
|
||||
* You can change the value of the <<ignore-above,`ignore_above`>> mapping
|
||||
parameter.
|
||||
|
||||
Changing the mapping of an existing field could invalidate data that's already
|
||||
indexed. If you need to change the mapping of a field, create a new index with
|
||||
the correct mappings and <<docs-reindex,reindex>> your data into that index. If
|
||||
you only want to rename a field, consider adding an <<alias, `alias`>> field.
|
||||
|
||||
// end::put-field-mapping-exceptions[]
|
||||
|
||||
For example:
|
||||
|
||||
|
|
|
@ -118,49 +118,151 @@ You know more about your data than Elasticsearch can guess, so while dynamic
|
|||
mapping can be useful to get started, at some point you will want to specify
|
||||
your own explicit mappings.
|
||||
|
||||
You can create field mappings when you
|
||||
<<indices-create-index,create an index>>, and you can add
|
||||
fields to an existing index with the <<indices-put-mapping,PUT mapping API>>.
|
||||
You can create field mappings when you <<create-mapping,create an index>> and
|
||||
<<add-field-mapping,add fields to an existing index>>.
|
||||
|
||||
[float]
|
||||
== Updating existing field mappings
|
||||
[[create-mapping]]
|
||||
== Create an index with an explicit mapping
|
||||
|
||||
Other than where documented, *existing field mappings cannot be
|
||||
updated*. Changing the mapping would mean invalidating already indexed
|
||||
documents. Instead, you should create a new index with the correct mappings
|
||||
and <<docs-reindex,reindex>> your data into that index. If you only wish
|
||||
to rename a field and not change its mappings, it may make sense to introduce
|
||||
an <<alias, `alias`>> field.
|
||||
|
||||
[float]
|
||||
== Example mapping
|
||||
|
||||
A mapping can be specified when creating an index, as follows:
|
||||
You can use the <<indices-create-index,create index>> API to create a new index
|
||||
with an explicit mapping.
|
||||
|
||||
[source,js]
|
||||
---------------------------------------
|
||||
PUT my_index <1>
|
||||
----
|
||||
PUT /my-index
|
||||
{
|
||||
"mappings": {
|
||||
"properties": { <2>
|
||||
"title": { "type": "text" }, <3>
|
||||
"name": { "type": "text" }, <4>
|
||||
"age": { "type": "integer" }, <5>
|
||||
"created": {
|
||||
"type": "date", <6>
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
"properties": {
|
||||
"age": { "type": "integer" }, <1>
|
||||
"email": { "type": "keyword" }, <2>
|
||||
"name": { "type": "text" } <3>
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
// CONSOLE
|
||||
|
||||
<1> Creates `age`, an <<number,`integer`>> field
|
||||
<2> Creates `email`, a <<keyword,`keyword`>> field
|
||||
<3> Creates `name`, a <<text,`text`>> field
|
||||
|
||||
[float]
|
||||
[[add-field-mapping]]
|
||||
== Add a field to an existing mapping
|
||||
|
||||
You can use the <<indices-put-mapping, put mapping>> API to add one or more new
|
||||
fields to an existing index.
|
||||
|
||||
The following example adds `employee-id`, a `keyword` field with an
|
||||
<<mapping-index,`index`>> mapping parameter value of `false`. This means values
|
||||
for the `employee-id` field are stored but not indexed or available for search.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
PUT /my-index/_mapping
|
||||
{
|
||||
"properties": {
|
||||
"employee-id": {
|
||||
"type": "keyword",
|
||||
"index": false
|
||||
}
|
||||
}
|
||||
}
|
||||
----
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
[float]
|
||||
[[update-mapping]]
|
||||
=== Update the mapping of a field
|
||||
|
||||
include::{docdir}/indices/put-mapping.asciidoc[tag=put-field-mapping-exceptions]
|
||||
|
||||
[float]
|
||||
[[view-mapping]]
|
||||
== View the mapping of an index
|
||||
|
||||
You can use the <<indices-get-mapping, get mapping>> API to view the mapping of
|
||||
an existing index.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
GET /my-index/_mapping
|
||||
----
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
{
|
||||
"my-index" : {
|
||||
"mappings" : {
|
||||
"properties" : {
|
||||
"age" : {
|
||||
"type" : "integer"
|
||||
},
|
||||
"email" : {
|
||||
"type" : "keyword"
|
||||
},
|
||||
"employee-id" : {
|
||||
"type" : "keyword",
|
||||
"index" : false
|
||||
},
|
||||
"name" : {
|
||||
"type" : "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
---------------------------------------
|
||||
----
|
||||
// TESTRESPONSE
|
||||
|
||||
|
||||
[float]
|
||||
[[view-field-mapping]]
|
||||
== View the mapping of specific fields
|
||||
|
||||
If you only want to view the mapping of one or more specific fields, you can use
|
||||
the <<indices-get-field-mapping, get field mapping>> API.
|
||||
|
||||
This is useful if you don't need the complete mapping of an index or your index
|
||||
contains a large number of fields.
|
||||
|
||||
The following request retrieves the mapping for the `employee-id` field.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
GET /my-index/_mapping/field/employee-id
|
||||
----
|
||||
// CONSOLE
|
||||
<1> Create an index called `my_index`.
|
||||
<2> Specify the fields or _properties_ in the mapping.
|
||||
<3> Specify that the `title` field contains `text` values.
|
||||
<4> Specify that the `name` field contains `text` values.
|
||||
<5> Specify that the `age` field contains `integer` values.
|
||||
<6> Specify that the `created` field contains `date` values in two possible formats.
|
||||
// TEST[continued]
|
||||
|
||||
The API returns the following response:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
{
|
||||
"my-index" : {
|
||||
"mappings" : {
|
||||
"employee-id" : {
|
||||
"full_name" : "employee-id",
|
||||
"mapping" : {
|
||||
"employee-id" : {
|
||||
"type" : "keyword",
|
||||
"index" : false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
----
|
||||
// TESTRESPONSE
|
||||
|
||||
--
|
||||
|
||||
|
|
|
@ -57,3 +57,11 @@ The client method `termVector`, deprecated in 2.0, has been removed. The method
|
|||
|
||||
The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7,
|
||||
has been removed. The parameterless constructor should be used instead.
|
||||
|
||||
[float]
|
||||
==== Changes to Geometry classes
|
||||
|
||||
Geometry classes used to represent geo values in SQL have been moved from the
|
||||
`org.elasticsearch.geo.geometry` package to the `org.elasticsearch.geometry`
|
||||
package and the order of the constructor parameters has changed from `lat`, `lon`
|
||||
to `lon`, `lat`.
|
||||
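For example, a point previously constructed from latitude and longitude now
takes its coordinates in the opposite order. This is a hypothetical snippet
based on the constructors in this release, not code from the migration guide:

[source,java]
--------------------------------------------------
// Before: org.elasticsearch.geo.geometry.Point(lat, lon)
Point oldPoint = new Point(40.7, -74.0);  // lat = 40.7, lon = -74.0

// After: org.elasticsearch.geometry.Point(x, y), i.e. (lon, lat)
Point newPoint = new Point(-74.0, 40.7);  // x = lon = -74.0, y = lat = 40.7
--------------------------------------------------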
|
|
|
@ -0,0 +1,37 @@
|
|||
[role="xpack"]
|
||||
[testenv="basic"]
|
||||
[[query-dsl-pinned-query]]
|
||||
=== Pinned Query
|
||||
Promotes selected documents to rank higher than those matching a given query.
|
||||
This feature is typically used to guide searchers to curated documents that are
|
||||
promoted over and above any "organic" matches for a search.
|
||||
The promoted or "pinned" documents are identified using the document IDs stored in
|
||||
the <<mapping-id-field,`_id`>> field.
|
||||
|
||||
==== Example request
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET /_search
|
||||
{
|
||||
"query": {
|
||||
"pinned" : {
|
||||
"ids" : ["1", "4", "100"],
|
||||
"organic" : {
|
||||
"match":{
|
||||
"description": "iphone"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
[[pinned-query-top-level-parameters]]
|
||||
==== Top-level parameters for `pinned`
|
||||
|
||||
`ids`::
|
||||
An array of <<mapping-id-field, document IDs>> listed in the order they are to appear in results.
|
||||
`organic`::
|
||||
Any choice of query used to rank documents, which will be ranked below the "pinned" document IDs.
|
|
@ -31,6 +31,8 @@ A query that allows to modify the score of a sub-query with a script.
|
|||
<<query-dsl-wrapper-query,`wrapper` query>>::
|
||||
A query that accepts other queries as json or yaml string.
|
||||
|
||||
<<query-dsl-pinned-query,`pinned` query>>::
|
||||
A query that promotes selected documents over others matching a given query.
|
||||
|
||||
include::distance-feature-query.asciidoc[]
|
||||
|
||||
|
@ -44,4 +46,6 @@ include::script-query.asciidoc[]
|
|||
|
||||
include::script-score-query.asciidoc[]
|
||||
|
||||
include::wrapper-query.asciidoc[]
|
||||
include::wrapper-query.asciidoc[]
|
||||
|
||||
include::pinned-query.asciidoc[]
|
|
@ -6,6 +6,7 @@
|
|||
|
||||
This section summarizes the changes in each release.
|
||||
|
||||
* <<release-notes-7.3.1>>
|
||||
* <<release-notes-7.3.0>>
|
||||
* <<release-notes-7.2.1>>
|
||||
* <<release-notes-7.2.0>>
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
[[release-notes-7.3.1]]
|
||||
== {es} version 7.3.1
|
||||
|
||||
Also see <<breaking-changes-7.3,Breaking changes in 7.3>>.
|
||||
|
||||
coming[7.3.1]
|
||||
|
||||
[[release-notes-7.3.0]]
|
||||
== {es} version 7.3.0
|
||||
|
||||
|
|
|
@ -11,31 +11,31 @@ Creates a {rollup-job}.
|
|||
|
||||
experimental[]
|
||||
|
||||
[[sample-api-request]]
|
||||
[[rollup-put-job-api-request]]
|
||||
==== {api-request-title}
|
||||
|
||||
`PUT _rollup/job/<job_id>`
|
||||
|
||||
[[sample-api-prereqs]]
|
||||
[[rollup-put-job-api-prereqs]]
|
||||
==== {api-prereq-title}
|
||||
|
||||
* If the {es} {security-features} are enabled, you must have `manage` or
|
||||
`manage_rollup` cluster privileges to use this API. For more information, see
|
||||
{stack-ov}/security-privileges.html[Security privileges].
|
||||
|
||||
[[sample-api-desc]]
|
||||
[[rollup-put-job-api-desc]]
|
||||
==== {api-description-title}
|
||||
|
||||
Jobs are created in a `STOPPED` state. You can start them with the
|
||||
<<rollup-start-job,start {rollup-jobs} API>>.
|
||||
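For example, assuming a {rollup-job} named `sensor` (the name is illustrative),
you would start it with:

[source,js]
--------------------------------------------------
POST _rollup/job/sensor/_start
--------------------------------------------------
// NOTCONSOLE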
|
||||
[[sample-api-path-params]]
|
||||
[[rollup-put-job-api-path-params]]
|
||||
==== {api-path-parms-title}
|
||||
|
||||
`job_id`::
|
||||
(Required, string) Identifier for the {rollup-job}.
|
||||
|
||||
[[sample-api-request-body]]
|
||||
[[rollup-put-job-api-request-body]]
|
||||
==== {api-request-body-title}
|
||||
|
||||
`cron`::
|
||||
|
@ -64,7 +64,7 @@ Jobs are created in a `STOPPED` state. You can start them with the
|
|||
|
||||
For more details about the job configuration, see <<rollup-job-config>>.
|
||||
|
||||
[[sample-api-example]]
|
||||
[[rollup-put-job-api-example]]
|
||||
==== {api-example-title}
|
||||
|
||||
The following example creates a {rollup-job} named "sensor", targeting the
|
||||
|
|
|
@ -175,7 +175,7 @@ snippets: `simple` or `span`. Only valid for the `plain` highlighter.
|
|||
Defaults to `span`.
|
||||
|
||||
`simple`::: Breaks up text into same-sized fragments.
|
||||
`span`::: Breaks up text into same-sized fragments, but tried to avoid
|
||||
`span`::: Breaks up text into same-sized fragments, but tries to avoid
|
||||
breaking up text between highlighted terms. This is helpful when you're
|
||||
querying for phrases. Default.
|
||||
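For example, the following request (an illustrative sketch; the field name and
query are placeholders) selects the `simple` fragmenter for a field highlighted
with the `plain` highlighter:

[source,js]
--------------------------------------------------
GET /_search
{
  "query": {
    "match_phrase": { "message": "number 1" }
  },
  "highlight": {
    "fields": {
      "message": {
        "type": "plain",
        "fragmenter": "simple"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE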
|
||||
|
|
|
@ -13,3 +13,7 @@ The index alias to update when the index rolls over. Specify when using a
|
|||
policy that contains a rollover action. When the index rolls over, the alias is
|
||||
updated to reflect that the index is no longer the write index. For more
|
||||
information about rollover, see <<using-policies-rollover>>.
|
||||
|
||||
`indices.lifecycle.poll_interval`::
|
||||
(<<time-units, time units>>) How often {ilm} checks for indices that meet policy
|
||||
criteria. Defaults to `10m`.
|
||||
|
|
|
@ -17,7 +17,9 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import org.elasticsearch.geometry.utils.WellKnownText;
|
||||
|
||||
/**
|
||||
* Circle geometry (not part of WKT standard, but used in elasticsearch) defined by lat/lon coordinates of the center in degrees
|
||||
|
@ -25,27 +27,27 @@ package org.elasticsearch.geo.geometry;
|
|||
*/
|
||||
public class Circle implements Geometry {
|
||||
public static final Circle EMPTY = new Circle();
|
||||
private final double lat;
|
||||
private final double lon;
|
||||
private final double alt;
|
||||
private final double y;
|
||||
private final double x;
|
||||
private final double z;
|
||||
private final double radiusMeters;
|
||||
|
||||
private Circle() {
|
||||
lat = 0;
|
||||
lon = 0;
|
||||
alt = Double.NaN;
|
||||
y = 0;
|
||||
x = 0;
|
||||
z = Double.NaN;
|
||||
radiusMeters = -1;
|
||||
}
|
||||
|
||||
public Circle(final double lat, final double lon, final double radiusMeters) {
|
||||
this(lat, lon, Double.NaN, radiusMeters);
|
||||
public Circle(final double x, final double y, final double radiusMeters) {
|
||||
this(x, y, Double.NaN, radiusMeters);
|
||||
}
|
||||
|
||||
public Circle(final double lat, final double lon, final double alt, final double radiusMeters) {
|
||||
this.lat = lat;
|
||||
this.lon = lon;
|
||||
public Circle(final double x, final double y, final double z, final double radiusMeters) {
|
||||
this.y = y;
|
||||
this.x = x;
|
||||
this.radiusMeters = radiusMeters;
|
||||
this.alt = alt;
|
||||
this.z = z;
|
||||
if (radiusMeters < 0 ) {
|
||||
throw new IllegalArgumentException("Circle radius [" + radiusMeters + "] cannot be negative");
|
||||
}
|
||||
|
@ -56,20 +58,32 @@ public class Circle implements Geometry {
|
|||
return ShapeType.CIRCLE;
|
||||
}
|
||||
|
||||
public double getLat() {
|
||||
return lat;
|
||||
public double getY() {
|
||||
return y;
|
||||
}
|
||||
|
||||
public double getLon() {
|
||||
return lon;
|
||||
public double getX() {
|
||||
return x;
|
||||
}
|
||||
|
||||
public double getRadiusMeters() {
|
||||
return radiusMeters;
|
||||
}
|
||||
|
||||
public double getZ() {
|
||||
return z;
|
||||
}
|
||||
|
||||
public double getLat() {
|
||||
return y;
|
||||
}
|
||||
|
||||
public double getLon() {
|
||||
return x;
|
||||
}
|
||||
|
||||
public double getAlt() {
|
||||
return alt;
|
||||
return z;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -78,23 +92,23 @@ public class Circle implements Geometry {
|
|||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
Circle circle = (Circle) o;
|
||||
if (Double.compare(circle.lat, lat) != 0) return false;
|
||||
if (Double.compare(circle.lon, lon) != 0) return false;
|
||||
if (Double.compare(circle.y, y) != 0) return false;
|
||||
if (Double.compare(circle.x, x) != 0) return false;
|
||||
if (Double.compare(circle.radiusMeters, radiusMeters) != 0) return false;
|
||||
return (Double.compare(circle.alt, alt) == 0);
|
||||
return (Double.compare(circle.z, z) == 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result;
|
||||
long temp;
|
||||
temp = Double.doubleToLongBits(lat);
|
||||
temp = Double.doubleToLongBits(y);
|
||||
result = (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(lon);
|
||||
temp = Double.doubleToLongBits(x);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(radiusMeters);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(alt);
|
||||
temp = Double.doubleToLongBits(z);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
|
@ -111,11 +125,11 @@ public class Circle implements Geometry {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "lat=" + lat + ", lon=" + lon + ", radius=" + radiusMeters + (Double.isNaN(alt) ? ", alt=" + alt : "");
|
||||
return WellKnownText.INSTANCE.toWKT(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAlt() {
|
||||
return Double.isNaN(alt) == false;
|
||||
public boolean hasZ() {
|
||||
return Double.isNaN(z) == false;
|
||||
}
|
||||
}
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
/**
|
||||
* Base class for all Geometry objects supported by elasticsearch
|
||||
|
@ -30,7 +30,11 @@ public interface Geometry {
|
|||
|
||||
boolean isEmpty();
|
||||
|
||||
default boolean hasAlt() {
|
||||
default boolean hasZ() {
|
||||
return false;
|
||||
}
|
||||
|
||||
default boolean hasAlt() {
|
||||
return hasZ();
|
||||
}
|
||||
}
|
|
@ -16,12 +16,13 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import org.elasticsearch.geometry.utils.WellKnownText;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
|
@ -42,9 +43,9 @@ public class GeometryCollection<G extends Geometry> implements Geometry, Iterabl
|
|||
if (shapes == null || shapes.isEmpty()) {
|
||||
throw new IllegalArgumentException("the list of shapes cannot be null or empty");
|
||||
}
|
||||
hasAlt = shapes.get(0).hasAlt();
|
||||
hasAlt = shapes.get(0).hasZ();
|
||||
for (G shape : shapes) {
|
||||
if (shape.hasAlt() != hasAlt) {
|
||||
if (shape.hasZ() != hasAlt) {
|
||||
throw new IllegalArgumentException("all elements of the collection should have the same number of dimension");
|
||||
}
|
||||
}
|
||||
|
@ -93,16 +94,12 @@ public class GeometryCollection<G extends Geometry> implements Geometry, Iterabl
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAlt() {
|
||||
public boolean hasZ() {
|
||||
return hasAlt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(type().name().toLowerCase(Locale.ROOT)).append("(shapes=");
|
||||
sb.append(shapes);
|
||||
sb.append(")");
|
||||
return sb.toString();
|
||||
return WellKnownText.INSTANCE.toWKT(this);
|
||||
}
|
||||
}
|
|
@ -17,7 +17,9 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import org.elasticsearch.geometry.utils.WellKnownText;
|
||||
|
||||
/**
|
||||
* Support class for creating Geometry Visitors.
|
||||
|
@ -40,7 +42,7 @@ package org.elasticsearch.geo.geometry;
|
|||
* The Visitor Pattern replaces this structure with Interface inheritance making it easier to identify all places that are using this
|
||||
* structure, and making the addition of a new shape a compile-time failure instead of a runtime one.
|
||||
* <p>
|
||||
* See {@link org.elasticsearch.geo.utils.WellKnownText#toWKT(Geometry, StringBuilder)} for an example of how this interface is used.
|
||||
* See {@link WellKnownText#toWKT(Geometry, StringBuilder)} for an example of how this interface is used.
|
||||
*
|
||||
* @see <a href="https://en.wikipedia.org/wiki/Visitor_pattern">Visitor Pattern</a>
|
||||
*/
|
|
@ -17,7 +17,9 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import org.elasticsearch.geometry.utils.WellKnownText;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
|
@ -26,71 +28,99 @@ import java.util.Arrays;
|
|||
*/
|
||||
public class Line implements Geometry {
|
||||
public static final Line EMPTY = new Line();
|
||||
private final double[] lats;
|
||||
private final double[] lons;
|
||||
private final double[] alts;
|
||||
private final double[] y;
|
||||
private final double[] x;
|
||||
private final double[] z;
|
||||
|
||||
protected Line() {
|
||||
lats = new double[0];
|
||||
lons = new double[0];
|
||||
alts = null;
|
||||
y = new double[0];
|
||||
x = new double[0];
|
||||
z = null;
|
||||
}
|
||||
|
||||
public Line(double[] lats, double[] lons) {
|
||||
this(lats, lons, null);
|
||||
public Line(double[] x, double[] y) {
|
||||
this(x, y, null);
|
||||
}
|
||||
|
||||
public Line(double[] lats, double[] lons, double[] alts) {
|
||||
this.lats = lats;
|
||||
this.lons = lons;
|
||||
this.alts = alts;
|
||||
if (lats == null) {
|
||||
throw new IllegalArgumentException("lats must not be null");
|
||||
public Line(double[] x, double[] y, double[] z) {
|
||||
this.y = y;
|
||||
this.x = x;
|
||||
this.z = z;
|
||||
if (y == null) {
|
||||
throw new IllegalArgumentException("y must not be null");
|
||||
}
|
||||
if (lons == null) {
|
||||
throw new IllegalArgumentException("lons must not be null");
|
||||
if (x == null) {
|
||||
throw new IllegalArgumentException("x must not be null");
|
||||
}
|
||||
if (lats.length != lons.length) {
|
||||
throw new IllegalArgumentException("lats and lons must be equal length");
|
||||
if (y.length != x.length) {
|
||||
throw new IllegalArgumentException("x and y must be equal length");
|
||||
}
|
||||
if (lats.length < 2) {
|
||||
if (y.length < 2) {
|
||||
throw new IllegalArgumentException("at least two points in the line is required");
|
||||
}
|
||||
if (alts != null && alts.length != lats.length) {
|
||||
throw new IllegalArgumentException("alts and lats must be equal length");
|
||||
if (z != null && z.length != x.length) {
|
||||
throw new IllegalArgumentException("z and x must be equal length");
|
||||
}
|
||||
}
|
||||
|
||||
public int length() {
|
||||
return lats.length;
|
||||
return y.length;
|
||||
}
|
||||
|
||||
public double getY(int i) {
|
||||
return y[i];
|
||||
}
|
||||
|
||||
public double getX(int i) {
|
||||
return x[i];
|
||||
}
|
||||
|
||||
public double getZ(int i) {
|
||||
if (z != null) {
|
||||
return z[i];
|
||||
} else {
|
||||
return Double.NaN;
|
||||
}
|
||||
}
|
||||
|
||||
public double[] getY() {
|
||||
return y.clone();
|
||||
}
|
||||
|
||||
public double[] getX() {
|
||||
return x.clone();
|
||||
}
|
||||
|
||||
public double[] getZ() {
|
||||
return z == null ? null : z.clone();
|
||||
}
|
||||
|
||||
public double getLat(int i) {
|
||||
return lats[i];
|
||||
return y[i];
|
||||
}
|
||||
|
||||
public double getLon(int i) {
|
||||
return lons[i];
|
||||
return x[i];
|
||||
}
|
||||
|
||||
public double getAlt(int i) {
|
||||
if (alts != null) {
|
||||
return alts[i];
|
||||
if (z != null) {
|
||||
return z[i];
|
||||
} else {
|
||||
return Double.NaN;
|
||||
}
|
||||
}
|
||||
|
||||
public double[] getLats() {
|
||||
return lats.clone();
|
||||
return y.clone();
|
||||
}
|
||||
|
||||
public double[] getLons() {
|
||||
return lons.clone();
|
||||
return x.clone();
|
||||
}
|
||||
|
||||
public double[] getAlts() {
|
||||
return alts == null ? null : alts.clone();
|
||||
return z == null ? null : z.clone();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -105,7 +135,7 @@ public class Line implements Geometry {
|
|||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return lats.length == 0;
|
||||
return y.length == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -113,27 +143,25 @@ public class Line implements Geometry {
|
|||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
Line line = (Line) o;
|
||||
return Arrays.equals(lats, line.lats) &&
|
||||
Arrays.equals(lons, line.lons) && Arrays.equals(alts, line.alts);
|
||||
return Arrays.equals(y, line.y) &&
|
||||
Arrays.equals(x, line.x) && Arrays.equals(z, line.z);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Arrays.hashCode(lats);
|
||||
result = 31 * result + Arrays.hashCode(lons);
|
||||
result = 31 * result + Arrays.hashCode(alts);
|
||||
int result = Arrays.hashCode(y);
|
||||
result = 31 * result + Arrays.hashCode(x);
|
||||
result = 31 * result + Arrays.hashCode(z);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAlt() {
|
||||
return alts != null;
|
||||
public boolean hasZ() {
|
||||
return z != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "lats=" + Arrays.toString(lats) +
|
||||
", lons=" + Arrays.toString(lons) +
|
||||
(hasAlt() ? ", alts=" + Arrays.toString(alts) : "");
|
||||
return WellKnownText.INSTANCE.toWKT(this);
|
||||
}
|
||||
}
|
|
@ -17,7 +17,9 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Represents a closed line on the earth's surface in lat/lon decimal degrees and optional altitude in meters.
|
||||
|
@ -30,21 +32,21 @@ public class LinearRing extends Line {
|
|||
private LinearRing() {
|
||||
}
|
||||
|
||||
public LinearRing(double[] lats, double[] lons) {
|
||||
this(lats, lons, null);
|
||||
public LinearRing(double[] x, double[] y) {
|
||||
this(x, y, null);
|
||||
}
|
||||
|
||||
public LinearRing(double[] lats, double[] lons, double[] alts) {
|
||||
super(lats, lons, alts);
|
||||
if (lats.length < 2) {
|
||||
throw new IllegalArgumentException("linear ring cannot contain less than 2 points, found " + lats.length);
|
||||
public LinearRing(double[] x, double[] y, double[] z) {
|
||||
super(x, y, z);
|
||||
if (x.length < 2) {
|
||||
throw new IllegalArgumentException("linear ring cannot contain less than 2 points, found " + x.length);
|
||||
}
|
||||
int last = lats.length - 1;
|
||||
if (lats[0] != lats[last] || lons[0] != lons[last] || (alts != null && alts[0] != alts[last])) {
|
||||
int last = x.length - 1;
|
||||
if (x[0] != x[last] || y[0] != y[last] || (z != null && z[0] != z[last])) {
|
||||
throw new IllegalArgumentException("first and last points of the linear ring must be the same (it must close itself):" +
|
||||
" lats[0]=" + lats[0] + " lats[" + last + "]=" + lats[last] +
|
||||
" lons[0]=" + lons[0] + " lons[" + last + "]=" + lons[last] +
|
||||
(alts == null ? "" : " alts[0]=" + alts[0] + " alts[" + last + "]=" + alts[last] ));
|
||||
" x[0]=" + x[0] + " x[" + last + "]=" + x[last] +
|
||||
" y[0]=" + y[0] + " y[" + last + "]=" + y[last] +
|
||||
(z == null ? "" : " z[0]=" + z[0] + " z[" + last + "]=" + z[last] ));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -57,4 +59,11 @@ public class LinearRing extends Line {
|
|||
public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
|
||||
return visitor.visit(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "linearring(x=" + Arrays.toString(getX()) +
|
||||
", y=" + Arrays.toString(getY()) +
|
||||
(hasZ() ? ", z=" + Arrays.toString(getZ()) : "");
|
||||
}
|
||||
}
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import java.util.List;
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import java.util.List;
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import java.util.List;
|
||||
|
|
@ -17,7 +17,9 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import org.elasticsearch.geometry.utils.WellKnownText;
|
||||
|
||||
/**
|
||||
* Represents a Point on the earth's surface in decimal degrees and optional altitude in meters.
|
||||
|
@ -25,26 +27,26 @@ package org.elasticsearch.geo.geometry;
|
|||
public class Point implements Geometry {
|
||||
public static final Point EMPTY = new Point();
|
||||
|
||||
private final double lat;
|
||||
private final double lon;
|
||||
private final double alt;
|
||||
private final double y;
|
||||
private final double x;
|
||||
private final double z;
|
||||
private final boolean empty;
|
||||
|
||||
private Point() {
|
||||
lat = 0;
|
||||
lon = 0;
|
||||
alt = Double.NaN;
|
||||
y = 0;
|
||||
x = 0;
|
||||
z = Double.NaN;
|
||||
empty = true;
|
||||
}
|
||||
|
||||
public Point(double lat, double lon) {
|
||||
this(lat, lon, Double.NaN);
|
||||
public Point(double x, double y) {
|
||||
this(x, y, Double.NaN);
|
||||
}
|
||||
|
||||
public Point(double lat, double lon, double alt) {
|
||||
this.lat = lat;
|
||||
this.lon = lon;
|
||||
this.alt = alt;
|
||||
public Point(double x, double y, double z) {
|
||||
this.y = y;
|
||||
this.x = x;
|
||||
this.z = z;
|
||||
this.empty = false;
|
||||
}
|
||||
|
||||
|
@ -53,16 +55,28 @@ public class Point implements Geometry {
|
|||
return ShapeType.POINT;
|
||||
}
|
||||
|
||||
public double getY() {
|
||||
return y;
|
||||
}
|
||||
|
||||
public double getX() {
|
||||
return x;
|
||||
}
|
||||
|
||||
public double getZ() {
|
||||
return z;
|
||||
}
|
||||
|
||||
public double getLat() {
|
||||
return lat;
|
||||
return y;
|
||||
}
|
||||
|
||||
public double getLon() {
|
||||
return lon;
|
||||
return x;
|
||||
}
|
||||
|
||||
public double getAlt() {
|
||||
return alt;
|
||||
return z;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -72,20 +86,20 @@ public class Point implements Geometry {
|
|||
|
||||
Point point = (Point) o;
|
||||
if (point.empty != empty) return false;
|
||||
if (Double.compare(point.lat, lat) != 0) return false;
|
||||
if (Double.compare(point.lon, lon) != 0) return false;
|
||||
return Double.compare(point.alt, alt) == 0;
|
||||
if (Double.compare(point.y, y) != 0) return false;
|
||||
if (Double.compare(point.x, x) != 0) return false;
|
||||
return Double.compare(point.z, z) == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result;
|
||||
long temp;
|
||||
temp = Double.doubleToLongBits(lat);
|
||||
temp = Double.doubleToLongBits(y);
|
||||
result = (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(lon);
|
||||
temp = Double.doubleToLongBits(x);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(alt);
|
||||
temp = Double.doubleToLongBits(z);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
|
@ -101,12 +115,13 @@ public class Point implements Geometry {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean hasAlt() {
|
||||
return Double.isNaN(alt) == false;
|
||||
public boolean hasZ() {
|
||||
return Double.isNaN(z) == false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "lat=" + lat + ", lon=" + lon + (hasAlt() ? ", alt=" + alt : "");
|
||||
return WellKnownText.INSTANCE.toWKT(this);
|
||||
}
|
||||
|
||||
}
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.geo.geometry;
|
||||
package org.elasticsearch.geometry;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
@@ -47,10 +47,10 @@ public final class Polygon implements Geometry {
         if (holes == null) {
             throw new IllegalArgumentException("holes must not be null");
         }
-        boolean hasAlt = polygon.hasAlt();
+        boolean hasAlt = polygon.hasZ();
         checkRing(polygon);
         for (LinearRing hole : holes) {
-            if (hole.hasAlt() != hasAlt) {
+            if (hole.hasZ() != hasAlt) {
                 throw new IllegalArgumentException("holes must have the same number of dimensions as the polygon");
             }
             checkRing(hole);
@@ -102,7 +102,7 @@ public final class Polygon implements Geometry {
     }
 
     @Override
-    public boolean hasAlt() {
+    public boolean hasZ() {
         return hasAlt;
     }
 
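A hedged sketch of the hole-dimension check above: every ring of a polygon must agree on whether a Z component is present, or the constructor throws. The coordinates are illustrative; the `Polygon(LinearRing, List<LinearRing>)` form matches the one exercised in the tests further down.

-------------------------------------------------
import java.util.Collections;
import org.elasticsearch.geometry.LinearRing;
import org.elasticsearch.geometry.Polygon;

public class PolygonHoleExample {
    public static void main(String[] args) {
        // 2D shell (x values first, then y values), closed ring
        LinearRing shell = new LinearRing(new double[]{0, 10, 10, 0, 0}, new double[]{0, 0, 10, 10, 0});
        // 2D hole - same dimensionality as the shell, so it is accepted
        LinearRing hole = new LinearRing(new double[]{4, 6, 6, 4, 4}, new double[]{4, 4, 6, 6, 4});
        Polygon polygon = new Polygon(shell, Collections.singletonList(hole));
        System.out.println(polygon.hasZ()); // false

        // A 3D hole inside a 2D shell would instead throw:
        // "holes must have the same number of dimensions as the polygon"
    }
}
-------------------------------------------------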
@@ -17,7 +17,9 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
+
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 /**
  * Represents a lat/lon rectangle in decimal degrees and optional altitude in meters.
@@ -25,90 +27,114 @@ package org.elasticsearch.geo.geometry;
 public class Rectangle implements Geometry {
     public static final Rectangle EMPTY = new Rectangle();
     /**
-     * maximum longitude value (in degrees)
+     * minimum latitude value (in degrees)
      */
-    private final double minLat;
+    private final double minY;
     /**
      * minimum longitude value (in degrees)
      */
-    private final double minLon;
+    private final double minX;
     /**
      * maximum altitude value (in meters)
      */
-    private final double minAlt;
+    private final double minZ;
     /**
      * maximum latitude value (in degrees)
      */
-    private final double maxLat;
+    private final double maxY;
     /**
-     * minimum latitude value (in degrees)
+     * minimum longitude value (in degrees)
      */
-    private final double maxLon;
+    private final double maxX;
     /**
      * minimum altitude value (in meters)
      */
-    private final double maxAlt;
+    private final double maxZ;
 
     private final boolean empty;
 
     private Rectangle() {
-        minLat = 0;
-        minLon = 0;
-        maxLat = 0;
-        maxLon = 0;
-        minAlt = Double.NaN;
-        maxAlt = Double.NaN;
+        minY = 0;
+        minX = 0;
+        maxY = 0;
+        maxX = 0;
+        minZ = Double.NaN;
+        maxZ = Double.NaN;
         empty = true;
     }
 
     /**
      * Constructs a bounding box by first validating the provided latitude and longitude coordinates
      */
-    public Rectangle(double minLat, double maxLat, double minLon, double maxLon) {
-        this(minLat, maxLat, minLon, maxLon, Double.NaN, Double.NaN);
+    public Rectangle(double minX, double maxX, double maxY, double minY) {
+        this(minX, maxX, maxY, minY, Double.NaN, Double.NaN);
     }
     /**
      * Constructs a bounding box by first validating the provided latitude and longitude coordinates
      */
-    public Rectangle(double minLat, double maxLat, double minLon, double maxLon, double minAlt, double maxAlt) {
-        this.minLon = minLon;
-        this.maxLon = maxLon;
-        this.minLat = minLat;
-        this.maxLat = maxLat;
-        this.minAlt = minAlt;
-        this.maxAlt = maxAlt;
+    public Rectangle(double minX, double maxX, double maxY, double minY, double minZ, double maxZ) {
+        this.minX = minX;
+        this.maxX = maxX;
+        this.minY = minY;
+        this.maxY = maxY;
+        this.minZ = minZ;
+        this.maxZ = maxZ;
         empty = false;
-        if (maxLat < minLat) {
-            throw new IllegalArgumentException("max lat cannot be less than min lat");
+        if (maxY < minY) {
+            throw new IllegalArgumentException("max y cannot be less than min x");
         }
-        if (Double.isNaN(minAlt) != Double.isNaN(maxAlt)) {
-            throw new IllegalArgumentException("only one altitude value is specified");
+        if (Double.isNaN(minZ) != Double.isNaN(maxZ)) {
+            throw new IllegalArgumentException("only one z value is specified");
         }
     }
 
+    public double getMinY() {
+        return minY;
+    }
+
+    public double getMinX() {
+        return minX;
+    }
+
+    public double getMinZ() {
+        return minZ;
+    }
+
+    public double getMaxY() {
+        return maxY;
+    }
+
+    public double getMaxX() {
+        return maxX;
+    }
+
+    public double getMaxZ() {
+        return maxZ;
+    }
+
     public double getMinLat() {
-        return minLat;
+        return minY;
     }
 
     public double getMinLon() {
-        return minLon;
+        return minX;
     }
 
     public double getMinAlt() {
-        return minAlt;
+        return minZ;
     }
 
     public double getMaxLat() {
-        return maxLat;
+        return maxY;
     }
 
     public double getMaxLon() {
-        return maxLon;
+        return maxX;
     }
 
     public double getMaxAlt() {
-        return maxAlt;
+        return maxZ;
     }
 
     @Override
@@ -118,29 +144,10 @@ public class Rectangle implements Geometry {
 
     @Override
     public String toString() {
-        StringBuilder b = new StringBuilder();
-        b.append("Rectangle(lat=");
-        b.append(minLat);
-        b.append(" TO ");
-        b.append(maxLat);
-        b.append(" lon=");
-        b.append(minLon);
-        b.append(" TO ");
-        b.append(maxLon);
-        if (maxLon < minLon) {
-            b.append(" [crosses dateline!]");
-        }
-        if (hasAlt()) {
-            b.append(" alt=");
-            b.append(minAlt);
-            b.append(" TO ");
-            b.append(maxAlt);
-        }
-        b.append(")");
-
-        return b.toString();
+        return WellKnownText.INSTANCE.toWKT(this);
     }
 
 
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
@@ -148,12 +155,12 @@ public class Rectangle implements Geometry {
 
         Rectangle rectangle = (Rectangle) o;
 
-        if (Double.compare(rectangle.minLat, minLat) != 0) return false;
-        if (Double.compare(rectangle.minLon, minLon) != 0) return false;
-        if (Double.compare(rectangle.maxLat, maxLat) != 0) return false;
-        if (Double.compare(rectangle.maxLon, maxLon) != 0) return false;
-        if (Double.compare(rectangle.minAlt, minAlt) != 0) return false;
-        return Double.compare(rectangle.maxAlt, maxAlt) == 0;
+        if (Double.compare(rectangle.minY, minY) != 0) return false;
+        if (Double.compare(rectangle.minX, minX) != 0) return false;
+        if (Double.compare(rectangle.maxY, maxY) != 0) return false;
+        if (Double.compare(rectangle.maxX, maxX) != 0) return false;
+        if (Double.compare(rectangle.minZ, minZ) != 0) return false;
+        return Double.compare(rectangle.maxZ, maxZ) == 0;
 
     }
 
@@ -161,17 +168,17 @@ public class Rectangle implements Geometry {
     public int hashCode() {
         int result;
         long temp;
-        temp = Double.doubleToLongBits(minLat);
+        temp = Double.doubleToLongBits(minY);
         result = (int) (temp ^ (temp >>> 32));
-        temp = Double.doubleToLongBits(minLon);
+        temp = Double.doubleToLongBits(minX);
         result = 31 * result + (int) (temp ^ (temp >>> 32));
-        temp = Double.doubleToLongBits(maxLat);
+        temp = Double.doubleToLongBits(maxY);
         result = 31 * result + (int) (temp ^ (temp >>> 32));
-        temp = Double.doubleToLongBits(maxLon);
+        temp = Double.doubleToLongBits(maxX);
         result = 31 * result + (int) (temp ^ (temp >>> 32));
-        temp = Double.doubleToLongBits(minAlt);
+        temp = Double.doubleToLongBits(minZ);
         result = 31 * result + (int) (temp ^ (temp >>> 32));
-        temp = Double.doubleToLongBits(maxAlt);
+        temp = Double.doubleToLongBits(maxZ);
         result = 31 * result + (int) (temp ^ (temp >>> 32));
         return result;
     }
@@ -187,7 +194,7 @@ public class Rectangle implements Geometry {
     }
 
     @Override
-    public boolean hasAlt() {
-        return Double.isNaN(maxAlt) == false;
+    public boolean hasZ() {
+        return Double.isNaN(maxZ) == false;
     }
 }
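For illustration only: a minimal sketch of the new `Rectangle` argument order seen above, (minX, maxX, maxY, minY), replacing the old (minLat, maxLat, minLon, maxLon). The bounds are made up.

-------------------------------------------------
import org.elasticsearch.geometry.Rectangle;

public class RectangleMigrationExample {
    public static void main(String[] args) {
        // Old call: new Rectangle(minLat, maxLat, minLon, maxLon)
        // New call: x (longitude) bounds first, then maxY before minY:
        Rectangle rect = new Rectangle(-10.0, 10.0, 45.0, 30.0);

        // The geography-flavored getters remain and delegate to x/y,
        // per the diff above:
        assert rect.getMinLon() == rect.getMinX(); // -10.0
        assert rect.getMaxLat() == rect.getMaxY(); // 45.0
    }
}
-------------------------------------------------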
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import java.util.Locale;
 
@@ -19,6 +19,6 @@
 
 /**
- * Common Geo classes
+ * Common Geometry classes
  */
-package org.elasticsearch.geo;
+package org.elasticsearch.geometry;
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
 /**
  * Utilities for common Bit twiddling methods. Borrowed heavily from Lucene (org.apache.lucene.util.BitUtil).
@@ -17,20 +17,20 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
-import org.elasticsearch.geo.geometry.Circle;
-import org.elasticsearch.geo.geometry.Geometry;
-import org.elasticsearch.geo.geometry.GeometryCollection;
-import org.elasticsearch.geo.geometry.GeometryVisitor;
-import org.elasticsearch.geo.geometry.Line;
-import org.elasticsearch.geo.geometry.LinearRing;
-import org.elasticsearch.geo.geometry.MultiLine;
-import org.elasticsearch.geo.geometry.MultiPoint;
-import org.elasticsearch.geo.geometry.MultiPolygon;
-import org.elasticsearch.geo.geometry.Point;
-import org.elasticsearch.geo.geometry.Polygon;
-import org.elasticsearch.geo.geometry.Rectangle;
+import org.elasticsearch.geometry.Circle;
+import org.elasticsearch.geometry.Geometry;
+import org.elasticsearch.geometry.GeometryCollection;
+import org.elasticsearch.geometry.GeometryVisitor;
+import org.elasticsearch.geometry.Line;
+import org.elasticsearch.geometry.LinearRing;
+import org.elasticsearch.geometry.MultiLine;
+import org.elasticsearch.geometry.MultiPoint;
+import org.elasticsearch.geometry.MultiPolygon;
+import org.elasticsearch.geometry.Point;
+import org.elasticsearch.geometry.Polygon;
+import org.elasticsearch.geometry.Rectangle;
 
 /**
  * Validator that checks that lats are between -90 and +90 and lons are between -180 and +180 and altitude is present only if
@@ -97,9 +97,9 @@ public class GeographyValidator implements GeometryValidator {
 
             @Override
             public Void visit(Circle circle) throws RuntimeException {
-                checkLatitude(circle.getLat());
-                checkLongitude(circle.getLon());
-                checkAltitude(circle.getAlt());
+                checkLatitude(circle.getY());
+                checkLongitude(circle.getX());
+                checkAltitude(circle.getZ());
                 return null;
             }
 
@@ -114,9 +114,9 @@ public class GeographyValidator implements GeometryValidator {
             @Override
             public Void visit(Line line) throws RuntimeException {
                 for (int i = 0; i < line.length(); i++) {
-                    checkLatitude(line.getLat(i));
-                    checkLongitude(line.getLon(i));
-                    checkAltitude(line.getAlt(i));
+                    checkLatitude(line.getY(i));
+                    checkLongitude(line.getX(i));
+                    checkAltitude(line.getZ(i));
                 }
                 return null;
             }
@@ -124,9 +124,9 @@ public class GeographyValidator implements GeometryValidator {
             @Override
             public Void visit(LinearRing ring) throws RuntimeException {
                 for (int i = 0; i < ring.length(); i++) {
-                    checkLatitude(ring.getLat(i));
-                    checkLongitude(ring.getLon(i));
-                    checkAltitude(ring.getAlt(i));
+                    checkLatitude(ring.getY(i));
+                    checkLongitude(ring.getX(i));
+                    checkAltitude(ring.getZ(i));
                 }
                 return null;
             }
@@ -148,9 +148,9 @@ public class GeographyValidator implements GeometryValidator {
 
             @Override
             public Void visit(Point point) throws RuntimeException {
-                checkLatitude(point.getLat());
-                checkLongitude(point.getLon());
-                checkAltitude(point.getAlt());
+                checkLatitude(point.getY());
+                checkLongitude(point.getX());
+                checkAltitude(point.getZ());
                 return null;
             }
 
@@ -165,12 +165,12 @@ public class GeographyValidator implements GeometryValidator {
 
             @Override
             public Void visit(Rectangle rectangle) throws RuntimeException {
-                checkLatitude(rectangle.getMinLat());
-                checkLatitude(rectangle.getMaxLat());
-                checkLongitude(rectangle.getMinLon());
-                checkLongitude(rectangle.getMaxLon());
-                checkAltitude(rectangle.getMinAlt());
-                checkAltitude(rectangle.getMaxAlt());
+                checkLatitude(rectangle.getMinY());
+                checkLatitude(rectangle.getMaxY());
+                checkLongitude(rectangle.getMinX());
+                checkLongitude(rectangle.getMaxX());
+                checkAltitude(rectangle.getMinZ());
+                checkAltitude(rectangle.getMaxZ());
                 return null;
             }
         });
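To illustrate the visitor above, a small hedged sketch of validating a parsed geometry with GeographyValidator; the coordinate values are made up, and the error text mirrors the messages asserted in the tests below.

-------------------------------------------------
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.GeographyValidator;
import org.elasticsearch.geometry.utils.GeometryValidator;

public class GeographyValidatorExample {
    public static void main(String[] args) {
        // true = accept (ignore) Z values
        GeometryValidator validator = new GeographyValidator(true);

        validator.validate(new Point(20, 10));      // x=20 (lon), y=10 (lat): ok
        try {
            validator.validate(new Point(200, 10)); // longitude out of range
        } catch (IllegalArgumentException e) {
            // invalid longitude 200.0; must be between -180.0 and 180.0
            System.out.println(e.getMessage());
        }
    }
}
-------------------------------------------------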
@@ -16,10 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
-import org.elasticsearch.geo.geometry.Point;
-import org.elasticsearch.geo.geometry.Rectangle;
+import org.elasticsearch.geometry.Point;
+import org.elasticsearch.geometry.Rectangle;
 
 import java.util.ArrayList;
 import java.util.Collection;
 
@@ -61,7 +61,7 @@ public class Geohash {
     /** Returns a {@link Point} instance from a geohash string */
     public static Point toPoint(final String geohash) throws IllegalArgumentException {
         final long hash = mortonEncode(geohash);
-        return new Point(decodeLatitude(hash), decodeLongitude(hash));
+        return new Point(decodeLongitude(hash), decodeLatitude(hash));
     }
 
     /**
@@ -85,15 +85,15 @@ public class Geohash {
             // add 1 to lat and lon to get topRight
             ghLong = BitUtil.interleave((int)(lat + 1), (int)(lon + 1)) << 4 | len;
             final long mortonHash = BitUtil.flipFlop((ghLong >>> 4) << shift);
-            Point topRight = new Point(decodeLatitude(mortonHash), decodeLongitude(mortonHash));
-            return new Rectangle(bottomLeft.getLat(), topRight.getLat(), bottomLeft.getLon(), topRight.getLon());
+            Point topRight = new Point(decodeLongitude(mortonHash), decodeLatitude(mortonHash));
+            return new Rectangle(bottomLeft.getX(), topRight.getX(), topRight.getY(), bottomLeft.getY());
         } else {
             // We cannot go north of north pole, so just using 90 degrees instead of calculating it using
             // add 1 to lon to get lon of topRight, we are going to use 90 for lat
             ghLong = BitUtil.interleave((int)lat, (int)(lon + 1)) << 4 | len;
             final long mortonHash = BitUtil.flipFlop((ghLong >>> 4) << shift);
-            Point topRight = new Point(decodeLatitude(mortonHash), decodeLongitude(mortonHash));
-            return new Rectangle(bottomLeft.getLat(), 90D, bottomLeft.getLon(), topRight.getLon());
+            Point topRight = new Point(decodeLongitude(mortonHash), decodeLatitude(mortonHash));
+            return new Rectangle(bottomLeft.getX(), topRight.getX(), 90D, bottomLeft.getY());
         }
     }
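A short sketch of the new decode order in `Geohash.toPoint` above: the returned `Point` now carries longitude in x and latitude in y. The geohash string is an arbitrary example value.

-------------------------------------------------
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.Geohash;

public class GeohashExample {
    public static void main(String[] args) {
        Point p = Geohash.toPoint("u4pruydqqvj");
        // After this change, x is longitude and y is latitude
        System.out.println("lon=" + p.getX() + " lat=" + p.getY());
    }
}
-------------------------------------------------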
@@ -17,9 +17,9 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
-import org.elasticsearch.geo.geometry.Geometry;
+import org.elasticsearch.geometry.Geometry;
 
 /**
  * Generic geometry validator that can be used by the parser to verify the validity of the parsed geometry
@@ -17,20 +17,20 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
-import org.elasticsearch.geo.geometry.Circle;
-import org.elasticsearch.geo.geometry.Geometry;
-import org.elasticsearch.geo.geometry.GeometryCollection;
-import org.elasticsearch.geo.geometry.GeometryVisitor;
-import org.elasticsearch.geo.geometry.Line;
-import org.elasticsearch.geo.geometry.LinearRing;
-import org.elasticsearch.geo.geometry.MultiLine;
-import org.elasticsearch.geo.geometry.MultiPoint;
-import org.elasticsearch.geo.geometry.MultiPolygon;
-import org.elasticsearch.geo.geometry.Point;
-import org.elasticsearch.geo.geometry.Polygon;
-import org.elasticsearch.geo.geometry.Rectangle;
+import org.elasticsearch.geometry.Circle;
+import org.elasticsearch.geometry.Geometry;
+import org.elasticsearch.geometry.GeometryCollection;
+import org.elasticsearch.geometry.GeometryVisitor;
+import org.elasticsearch.geometry.Line;
+import org.elasticsearch.geometry.LinearRing;
+import org.elasticsearch.geometry.MultiLine;
+import org.elasticsearch.geometry.MultiPoint;
+import org.elasticsearch.geometry.MultiPolygon;
+import org.elasticsearch.geometry.Point;
+import org.elasticsearch.geometry.Polygon;
+import org.elasticsearch.geometry.Rectangle;
 
 /**
  * Validator that only checks that altitude only shows up if ignoreZValue is set to true.
@@ -43,7 +43,7 @@ public class StandardValidator implements GeometryValidator {
         this.ignoreZValue = ignoreZValue;
     }
 
-    protected void checkAltitude(double zValue) {
+    protected void checkZ(double zValue) {
         if (ignoreZValue == false && Double.isNaN(zValue) == false) {
             throw new IllegalArgumentException("found Z value [" + zValue + "] but [ignore_z_value] "
                 + "parameter is [" + ignoreZValue + "]");
@@ -57,7 +57,7 @@ public class StandardValidator implements GeometryValidator {
 
             @Override
             public Void visit(Circle circle) throws RuntimeException {
-                checkAltitude(circle.getAlt());
+                checkZ(circle.getZ());
                 return null;
             }
 
@@ -72,7 +72,7 @@ public class StandardValidator implements GeometryValidator {
             @Override
             public Void visit(Line line) throws RuntimeException {
                 for (int i = 0; i < line.length(); i++) {
-                    checkAltitude(line.getAlt(i));
+                    checkZ(line.getZ(i));
                 }
                 return null;
             }
@@ -80,7 +80,7 @@ public class StandardValidator implements GeometryValidator {
             @Override
             public Void visit(LinearRing ring) throws RuntimeException {
                 for (int i = 0; i < ring.length(); i++) {
-                    checkAltitude(ring.getAlt(i));
+                    checkZ(ring.getZ(i));
                 }
                 return null;
             }
@@ -102,7 +102,7 @@ public class StandardValidator implements GeometryValidator {
 
             @Override
             public Void visit(Point point) throws RuntimeException {
-                checkAltitude(point.getAlt());
+                checkZ(point.getZ());
                 return null;
             }
 
@@ -117,8 +117,8 @@ public class StandardValidator implements GeometryValidator {
 
             @Override
             public Void visit(Rectangle rectangle) throws RuntimeException {
-                checkAltitude(rectangle.getMinAlt());
-                checkAltitude(rectangle.getMaxAlt());
+                checkZ(rectangle.getMinZ());
+                checkZ(rectangle.getMaxZ());
                 return null;
             }
         });
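The renamed `checkZ` hook behaves as `checkAltitude` did. A minimal usage sketch with illustrative coordinates, matching the error message asserted in the tests further down:

-------------------------------------------------
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.StandardValidator;

public class StandardValidatorExample {
    public static void main(String[] args) {
        Point point3d = new Point(20, 10, 30); // x, y, z

        new StandardValidator(true).validate(point3d);      // Z tolerated

        try {
            new StandardValidator(false).validate(point3d); // Z rejected
        } catch (IllegalArgumentException e) {
            // found Z value [30.0] but [ignore_z_value] parameter is [false]
            System.out.println(e.getMessage());
        }
    }
}
-------------------------------------------------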
@@ -17,20 +17,20 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;
 
-import org.elasticsearch.geo.geometry.Circle;
-import org.elasticsearch.geo.geometry.Geometry;
-import org.elasticsearch.geo.geometry.GeometryCollection;
-import org.elasticsearch.geo.geometry.GeometryVisitor;
-import org.elasticsearch.geo.geometry.Line;
-import org.elasticsearch.geo.geometry.LinearRing;
-import org.elasticsearch.geo.geometry.MultiLine;
-import org.elasticsearch.geo.geometry.MultiPoint;
-import org.elasticsearch.geo.geometry.MultiPolygon;
-import org.elasticsearch.geo.geometry.Point;
-import org.elasticsearch.geo.geometry.Polygon;
-import org.elasticsearch.geo.geometry.Rectangle;
+import org.elasticsearch.geometry.Circle;
+import org.elasticsearch.geometry.Geometry;
+import org.elasticsearch.geometry.GeometryCollection;
+import org.elasticsearch.geometry.GeometryVisitor;
+import org.elasticsearch.geometry.Line;
+import org.elasticsearch.geometry.LinearRing;
+import org.elasticsearch.geometry.MultiLine;
+import org.elasticsearch.geometry.MultiPoint;
+import org.elasticsearch.geometry.MultiPolygon;
+import org.elasticsearch.geometry.Point;
+import org.elasticsearch.geometry.Polygon;
+import org.elasticsearch.geometry.Rectangle;
 
 import java.io.IOException;
 import java.io.StreamTokenizer;
 
@@ -45,6 +45,9 @@ import java.util.Locale;
  * Utility class for converting to and from WKT
  */
 public class WellKnownText {
+    /* The instance of WKT serializer that coerces values and accepts Z component */
+    public static final WellKnownText INSTANCE = new WellKnownText(true, new StandardValidator(true));
+
     public static final String EMPTY = "EMPTY";
     public static final String SPACE = " ";
     public static final String LPAREN = "(";
@@ -80,12 +83,12 @@ public class WellKnownText {
             @Override
             public Void visit(Circle circle) {
                 sb.append(LPAREN);
-                visitPoint(circle.getLon(), circle.getLat(), Double.NaN);
+                visitPoint(circle.getX(), circle.getY(), Double.NaN);
                 sb.append(SPACE);
                 sb.append(circle.getRadiusMeters());
-                if (circle.hasAlt()) {
+                if (circle.hasZ()) {
                     sb.append(SPACE);
-                    sb.append(circle.getAlt());
+                    sb.append(circle.getZ());
                 }
                 sb.append(RPAREN);
                 return null;
@@ -110,11 +113,11 @@ public class WellKnownText {
             @Override
             public Void visit(Line line) {
                 sb.append(LPAREN);
-                visitPoint(line.getLon(0), line.getLat(0), line.getAlt(0));
+                visitPoint(line.getX(0), line.getY(0), line.getZ(0));
                 for (int i = 1; i < line.length(); ++i) {
                     sb.append(COMMA);
                     sb.append(SPACE);
-                    visitPoint(line.getLon(i), line.getLat(i), line.getAlt(i));
+                    visitPoint(line.getX(i), line.getY(i), line.getZ(i));
                 }
                 sb.append(RPAREN);
                 return null;
@@ -139,12 +142,12 @@ public class WellKnownText {
                 }
                 // walk through coordinates:
                 sb.append(LPAREN);
-                visitPoint(multiPoint.get(0).getLon(), multiPoint.get(0).getLat(), multiPoint.get(0).getAlt());
+                visitPoint(multiPoint.get(0).getX(), multiPoint.get(0).getY(), multiPoint.get(0).getZ());
                 for (int i = 1; i < multiPoint.size(); ++i) {
                     sb.append(COMMA);
                     sb.append(SPACE);
                     Point point = multiPoint.get(i);
-                    visitPoint(point.getLon(), point.getLat(), point.getAlt());
+                    visitPoint(point.getX(), point.getY(), point.getZ());
                 }
                 sb.append(RPAREN);
                 return null;
@@ -162,7 +165,7 @@ public class WellKnownText {
                     sb.append(EMPTY);
                 } else {
                     sb.append(LPAREN);
-                    visitPoint(point.getLon(), point.getLat(), point.getAlt());
+                    visitPoint(point.getX(), point.getY(), point.getZ());
                     sb.append(RPAREN);
                 }
                 return null;
@@ -206,17 +209,24 @@ public class WellKnownText {
             public Void visit(Rectangle rectangle) {
                 sb.append(LPAREN);
-                // TODO: Add 3D support
-                sb.append(rectangle.getMinLon());
+                // minX, maxX, maxY, minY
+                sb.append(rectangle.getMinX());
                 sb.append(COMMA);
                 sb.append(SPACE);
-                sb.append(rectangle.getMaxLon());
+                sb.append(rectangle.getMaxX());
                 sb.append(COMMA);
                 sb.append(SPACE);
-                sb.append(rectangle.getMaxLat());
+                sb.append(rectangle.getMaxY());
                 sb.append(COMMA);
                 sb.append(SPACE);
-                sb.append(rectangle.getMinLat());
+                sb.append(rectangle.getMinY());
+                if (rectangle.hasZ()) {
+                    sb.append(COMMA);
+                    sb.append(SPACE);
+                    sb.append(rectangle.getMinZ());
+                    sb.append(COMMA);
+                    sb.append(SPACE);
+                    sb.append(rectangle.getMaxZ());
+                }
                 sb.append(RPAREN);
                 return null;
             }
@@ -298,9 +308,9 @@ public class WellKnownText {
         double lat = nextNumber(stream);
         Point pt;
         if (isNumberNext(stream)) {
-            pt = new Point(lat, lon, nextNumber(stream));
+            pt = new Point(lon, lat, nextNumber(stream));
         } else {
-            pt = new Point(lat, lon);
+            pt = new Point(lon, lat);
         }
         nextCloser(stream);
         return pt;
@@ -338,9 +348,9 @@ public class WellKnownText {
         parseCoordinates(stream, lats, lons, alts);
         for (int i = 0; i < lats.size(); i++) {
             if (alts.isEmpty()) {
-                points.add(new Point(lats.get(i), lons.get(i)));
+                points.add(new Point(lons.get(i), lats.get(i)));
             } else {
-                points.add(new Point(lats.get(i), lons.get(i), alts.get(i)));
+                points.add(new Point(lons.get(i), lats.get(i), alts.get(i)));
             }
         }
         return new MultiPoint(Collections.unmodifiableList(points));
@@ -356,9 +366,9 @@ public class WellKnownText {
         ArrayList<Double> alts = new ArrayList<>();
         parseCoordinates(stream, lats, lons, alts);
         if (alts.isEmpty()) {
-            return new Line(toArray(lats), toArray(lons));
+            return new Line(toArray(lons), toArray(lats));
         } else {
-            return new Line(toArray(lats), toArray(lons), toArray(alts));
+            return new Line(toArray(lons), toArray(lats), toArray(alts));
         }
     }
 
@@ -383,9 +393,9 @@ public class WellKnownText {
         parseCoordinates(stream, lats, lons, alts);
         closeLinearRingIfCoerced(lats, lons, alts);
         if (alts.isEmpty()) {
-            return new LinearRing(toArray(lats), toArray(lons));
+            return new LinearRing(toArray(lons), toArray(lats));
         } else {
-            return new LinearRing(toArray(lats), toArray(lons), toArray(alts));
+            return new LinearRing(toArray(lons), toArray(lats), toArray(alts));
         }
     }
 
@@ -405,9 +415,9 @@ public class WellKnownText {
         closeLinearRingIfCoerced(lats, lons, alts);
         LinearRing shell;
         if (alts.isEmpty()) {
-            shell = new LinearRing(toArray(lats), toArray(lons));
+            shell = new LinearRing(toArray(lons), toArray(lats));
         } else {
-            shell = new LinearRing(toArray(lats), toArray(lons), toArray(alts));
+            shell = new LinearRing(toArray(lons), toArray(lats), toArray(alts));
         }
         if (holes.isEmpty()) {
             return new Polygon(shell);
@@ -460,7 +470,7 @@ public class WellKnownText {
         nextComma(stream);
         double minLat = nextNumber(stream);
         nextCloser(stream);
-        return new Rectangle(minLat, maxLat, minLon, maxLon);
+        return new Rectangle(minLon, maxLon, maxLat, minLat);
     }
 
 
@@ -475,7 +485,7 @@ public class WellKnownText {
         if (isNumberNext(stream) == true) {
             alt = nextNumber(stream);
         }
-        Circle circle = new Circle(lat, lon, alt, radius);
+        Circle circle = new Circle(lon, lat, alt, radius);
         nextCloser(stream);
         return circle;
     }
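A round-trip sketch using the shared `WellKnownText.INSTANCE` introduced above (coerce mode, Z accepted). WKT coordinates are written as "x y" (lon lat), matching the new constructor order; the point values are illustrative.

-------------------------------------------------
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.WellKnownText;

public class WktRoundTripExample {
    public static void main(String[] args) throws Exception {
        // Serialize: x (longitude) is emitted first
        String wkt = WellKnownText.INSTANCE.toWKT(new Point(20, 10));
        System.out.println(wkt); // point (20.0 10.0)

        // Parse back: fromWKT declares IOException/ParseException
        Geometry parsed = WellKnownText.INSTANCE.fromWKT("point (20.0 10.0)");
        System.out.println(parsed.equals(new Point(20, 10))); // true
    }
}
-------------------------------------------------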
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 import org.elasticsearch.test.AbstractWireTestCase;
 
 import java.io.IOException;
 
@@ -36,7 +36,7 @@ abstract class BaseGeometryTestCase<T extends Geometry> extends AbstractWireTest
     protected final T createTestInstance() {
         boolean hasAlt = randomBoolean();
         T obj = createTestInstance(hasAlt);
-        assertEquals(hasAlt, obj.hasAlt());
+        assertEquals(hasAlt, obj.hasZ());
         return obj;
     }
 
@@ -70,7 +70,7 @@ abstract class BaseGeometryTestCase<T extends Geometry> extends AbstractWireTest
     private Object verify(Geometry geometry, String expectedClass) {
         assertFalse("Visitor should be called only once", called.getAndSet(true));
         assertSame(geom, geometry);
-        assertEquals(geometry.getClass().getName(), "org.elasticsearch.geo.geometry." + expectedClass);
+        assertEquals(geometry.getClass().getName(), "org.elasticsearch.geometry." + expectedClass);
         return "result";
     }
 
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -31,20 +31,20 @@ public class CircleTests extends BaseGeometryTestCase<Circle> {
     @Override
     protected Circle createTestInstance(boolean hasAlt) {
         if (hasAlt) {
-            return new Circle(randomDoubleBetween(-90, 90, true), randomDoubleBetween(-180, 180, true), randomDouble(),
+            return new Circle(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true), randomDouble(),
                 randomDoubleBetween(0, 100, false));
         } else {
-            return new Circle(randomDoubleBetween(-90, 90, true), randomDoubleBetween(-180, 180, true), randomDoubleBetween(0, 100, false));
+            return new Circle(randomDoubleBetween(-180, 180, true), randomDoubleBetween(-90, 90, true), randomDoubleBetween(0, 100, false));
         }
     }
 
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
-        assertEquals("circle (20.0 10.0 15.0)", wkt.toWKT(new Circle(10, 20, 15)));
-        assertEquals(new Circle(10, 20, 15), wkt.fromWKT("circle (20.0 10.0 15.0)"));
+        assertEquals("circle (20.0 10.0 15.0)", wkt.toWKT(new Circle(20, 10, 15)));
+        assertEquals(new Circle(20, 10, 15), wkt.fromWKT("circle (20.0 10.0 15.0)"));
 
-        assertEquals("circle (20.0 10.0 15.0 25.0)", wkt.toWKT(new Circle(10, 20, 25, 15)));
-        assertEquals(new Circle(10, 20, 25, 15), wkt.fromWKT("circle (20.0 10.0 15.0 25.0)"));
+        assertEquals("circle (20.0 10.0 15.0 25.0)", wkt.toWKT(new Circle(20, 10, 25, 15)));
+        assertEquals(new Circle(20, 10, 25, 15), wkt.fromWKT("circle (20.0 10.0 15.0 25.0)"));
 
         assertEquals("circle EMPTY", wkt.toWKT(Circle.EMPTY));
         assertEquals(Circle.EMPTY, wkt.fromWKT("circle EMPTY)"));
@@ -52,18 +52,18 @@ public class CircleTests extends BaseGeometryTestCase<Circle> {
 
     public void testInitValidation() {
         GeometryValidator validator = new GeographyValidator(true);
-        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(10, 20, -1)));
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(20, 10, -1)));
         assertEquals("Circle radius [-1.0] cannot be negative", ex.getMessage());
 
-        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(100, 20, 1)));
+        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(20, 100, 1)));
         assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());
 
-        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(10, 200, 1)));
+        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Circle(200, 10, 1)));
         assertEquals("invalid longitude 200.0; must be between -180.0 and 180.0", ex.getMessage());
 
-        ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(new Circle(10, 200, 1, 20)));
+        ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(new Circle(200, 10, 1, 20)));
         assertEquals("found Z value [1.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new Circle(10, 200, 1, 20));
+        new StandardValidator(true).validate(new Circle(200, 10, 1, 20));
     }
 }
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -35,14 +35,12 @@ public class GeometryCollectionTests extends BaseGeometryTestCase<GeometryCollec
         return GeometryTestUtils.randomGeometryCollection(hasAlt);
     }
 
-
-
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
         assertEquals("geometrycollection (point (20.0 10.0),point EMPTY)",
-            wkt.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY))));
+            wkt.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(20, 10), Point.EMPTY))));
 
-        assertEquals(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY)),
+        assertEquals(new GeometryCollection<Geometry>(Arrays.asList(new Point(20, 10), Point.EMPTY)),
             wkt.fromWKT("geometrycollection (point (20.0 10.0),point EMPTY)"));
 
         assertEquals("geometrycollection EMPTY", wkt.toWKT(GeometryCollection.EMPTY));
@@ -58,13 +56,13 @@ public class GeometryCollectionTests extends BaseGeometryTestCase<GeometryCollec
         assertEquals("the list of shapes cannot be null or empty", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class, () -> new GeometryCollection<>(
-            Arrays.asList(new Point(10, 20), new Point(10, 20, 30))));
+            Arrays.asList(new Point(20, 10), new Point(20, 10, 30))));
         assertEquals("all elements of the collection should have the same number of dimension", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new GeometryCollection<Geometry>(Collections.singletonList(new Point(10, 20, 30)))));
+            new GeometryCollection<Geometry>(Collections.singletonList(new Point(20, 10, 30)))));
         assertEquals("found Z value [30.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new GeometryCollection<Geometry>(Collections.singletonList(new Point(10, 20, 30))));
+        new StandardValidator(true).validate(new GeometryCollection<Geometry>(Collections.singletonList(new Point(20, 10, 30))));
     }
 }
@@ -17,11 +17,11 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 import org.elasticsearch.test.ESTestCase;
 
 public class GeometryValidatorTests extends ESTestCase {
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -36,12 +36,12 @@ public class LineTests extends BaseGeometryTestCase<Line> {
 
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
-        assertEquals("linestring (3.0 1.0, 4.0 2.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4})));
-        assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), wkt.fromWKT("linestring (3 1, 4 2)"));
+        assertEquals("linestring (3.0 1.0, 4.0 2.0)", wkt.toWKT(new Line(new double[]{3, 4}, new double[]{1, 2})));
+        assertEquals(new Line(new double[]{3, 4}, new double[]{1, 2}), wkt.fromWKT("linestring (3 1, 4 2)"));
 
-        assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4},
+        assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", wkt.toWKT(new Line(new double[]{3, 4}, new double[]{1, 2},
             new double[]{5, 6})));
-        assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}),
+        assertEquals(new Line(new double[]{3, 4}, new double[]{1, 2}, new double[]{6, 5}),
             wkt.fromWKT("linestring (3 1 6, 4 2 5)"));
 
         assertEquals("linestring EMPTY", wkt.toWKT(Line.EMPTY));
@@ -51,22 +51,22 @@ public class LineTests extends BaseGeometryTestCase<Line> {
     public void testInitValidation() {
         GeometryValidator validator = new GeographyValidator(true);
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Line(new double[]{1}, new double[]{3})));
+            () -> validator.validate(new Line(new double[]{3}, new double[]{1})));
         assertEquals("at least two points in the line is required", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Line(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3})));
+            () -> validator.validate(new Line(new double[]{3, 4, 500, 3}, new double[]{1, 2, 3, 1})));
         assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Line(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3})));
+            () -> validator.validate(new Line(new double[]{3, 4, 5, 3}, new double[]{1, 100, 3, 1})));
         assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5})));
+            new Line(new double[]{3, 4}, new double[]{1, 2}, new double[]{6, 5})));
         assertEquals("found Z value [6.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}));
+        new StandardValidator(true).validate(new Line(new double[]{3, 4}, new double[]{1, 2}, new double[]{6, 5}));
     }
 
     public void testWKTValidation() {
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 import org.elasticsearch.test.ESTestCase;
 
 public class LinearRingTests extends ESTestCase {
 
@@ -30,44 +30,44 @@ public class LinearRingTests extends ESTestCase {
     public void testBasicSerialization() {
         UnsupportedOperationException ex = expectThrows(UnsupportedOperationException.class,
             () -> new WellKnownText(true, new GeographyValidator(true))
-                .toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})));
+                .toWKT(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})));
         assertEquals("line ring cannot be serialized using WKT", ex.getMessage());
     }
 
     public void testInitValidation() {
         GeometryValidator validator = new GeographyValidator(true);
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new LinearRing(new double[]{1, 2, 3}, new double[]{3, 4, 5})));
-        assertEquals("first and last points of the linear ring must be the same (it must close itself): lats[0]=1.0 lats[2]=3.0 " +
-                "lons[0]=3.0 lons[2]=5.0",
+            () -> validator.validate(new LinearRing(new double[]{3, 4, 5}, new double[]{1, 2, 3})));
+        assertEquals("first and last points of the linear ring must be the same (it must close itself): x[0]=3.0 x[2]=5.0 y[0]=1.0 " +
+                "y[2]=3.0",
             ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new LinearRing(new double[]{1, 2, 1}, new double[]{3, 4, 3}, new double[]{1, 2, 3})));
-        assertEquals("first and last points of the linear ring must be the same (it must close itself): lats[0]=1.0 lats[2]=1.0 " +
-                "lons[0]=3.0 lons[2]=3.0 alts[0]=1.0 alts[2]=3.0",
+            () -> validator.validate(new LinearRing(new double[]{3, 4, 3}, new double[]{1, 2, 1}, new double[]{1, 2, 3})));
+        assertEquals("first and last points of the linear ring must be the same (it must close itself): x[0]=3.0 x[2]=3.0 y[0]=1.0 " +
+                "y[2]=1.0 z[0]=1.0 z[2]=3.0",
            ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new LinearRing(new double[]{1}, new double[]{3})));
+            () -> validator.validate(new LinearRing(new double[]{3}, new double[]{1})));
         assertEquals("at least two points in the line is required", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 500, 3})));
+            () -> validator.validate(new LinearRing(new double[]{3, 4, 500, 3}, new double[]{1, 2, 3, 1})));
         assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new LinearRing(new double[]{1, 100, 3, 1}, new double[]{3, 4, 5, 3})));
+            () -> validator.validate(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 100, 3, 1})));
         assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());
 
         ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 1, 1, 1})));
+            new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 1, 1, 1})));
         assertEquals("found Z value [1.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 1, 1, 1}));
+        new StandardValidator(true).validate(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 1, 1, 1}));
     }
 
     public void testVisitor() {
-        BaseGeometryTestCase.testVisitor(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}));
+        BaseGeometryTestCase.testVisitor(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}));
    }
 }
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -45,8 +45,8 @@ public class MultiLineTests extends BaseGeometryTestCase<MultiLine> {
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
         assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", wkt.toWKT(
-            new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4})))));
-        assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))),
+            new MultiLine(Collections.singletonList(new Line(new double[]{3, 4}, new double[]{1, 2})))));
+        assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{3, 4}, new double[]{1, 2}))),
             wkt.fromWKT("multilinestring ((3 1, 4 2))"));
 
         assertEquals("multilinestring EMPTY", wkt.toWKT(MultiLine.EMPTY));
@@ -55,10 +55,10 @@ public class MultiLineTests extends BaseGeometryTestCase<MultiLine> {
 
     public void testValidation() {
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5})))));
+            new MultiLine(Collections.singletonList(new Line(new double[]{3, 4}, new double[]{1, 2}, new double[]{6, 5})))));
         assertEquals("found Z value [6.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
         new StandardValidator(true).validate(
-            new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}))));
+            new MultiLine(Collections.singletonList(new Line(new double[]{3, 4}, new double[]{1, 2}, new double[]{6, 5}))));
     }
 }
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -46,18 +46,18 @@ public class MultiPointTests extends BaseGeometryTestCase<MultiPoint> {
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
         assertEquals("multipoint (2.0 1.0)", wkt.toWKT(
-            new MultiPoint(Collections.singletonList(new Point(1, 2)))));
-        assertEquals(new MultiPoint(Collections.singletonList(new Point(1 ,2))),
+            new MultiPoint(Collections.singletonList(new Point(2, 1)))));
+        assertEquals(new MultiPoint(Collections.singletonList(new Point(2, 1))),
            wkt.fromWKT("multipoint (2 1)"));
 
         assertEquals("multipoint (2.0 1.0, 3.0 4.0)",
-            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3)))));
-        assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3))),
+            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(2, 1), new Point(3, 4)))));
+        assertEquals(new MultiPoint(Arrays.asList(new Point(2, 1), new Point(3, 4))),
             wkt.fromWKT("multipoint (2 1, 3 4)"));
 
         assertEquals("multipoint (2.0 1.0 10.0, 3.0 4.0 20.0)",
-            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20)))));
-        assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20))),
+            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(2, 1, 10), new Point(3, 4, 20)))));
+        assertEquals(new MultiPoint(Arrays.asList(new Point(2, 1, 10), new Point(3, 4, 20))),
            wkt.fromWKT("multipoint (2 1 10, 3 4 20)"));
 
         assertEquals("multipoint EMPTY", wkt.toWKT(MultiPoint.EMPTY));
@@ -66,9 +66,9 @@ public class MultiPointTests extends BaseGeometryTestCase<MultiPoint> {
 
     public void testValidation() {
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new MultiPoint(Collections.singletonList(new Point(1, 2 ,3)))));
+            new MultiPoint(Collections.singletonList(new Point(2, 1, 3)))));
        assertEquals("found Z value [3.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new MultiPoint(Collections.singletonList(new Point(1, 2 ,3))));
+        new StandardValidator(true).validate(new MultiPoint(Collections.singletonList(new Point(2, 1, 3))));
     }
 }
@@ -17,12 +17,12 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -46,9 +46,9 @@ public class MultiPolygonTests extends BaseGeometryTestCase<MultiPolygon> {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
         assertEquals("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))",
             wkt.toWKT(new MultiPolygon(Collections.singletonList(
-                new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))))));
+                new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}))))));
         assertEquals(new MultiPolygon(Collections.singletonList(
-            new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))),
+            new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})))),
             wkt.fromWKT("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))"));
 
         assertEquals("multipolygon EMPTY", wkt.toWKT(MultiPolygon.EMPTY));
@@ -58,12 +58,12 @@ public class MultiPolygonTests extends BaseGeometryTestCase<MultiPolygon> {
     public void testValidation() {
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
             new MultiPolygon(Collections.singletonList(
-                new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}))
+                new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 2, 3, 1}))
             ))));
         assertEquals("found Z value [1.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
         new StandardValidator(true).validate(
             new MultiPolygon(Collections.singletonList(
-                new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})))));
+                new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 2, 3, 1})))));
     }
 }
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;
 
 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;
 
 import java.io.IOException;
 import java.text.ParseException;
 
@@ -36,11 +36,11 @@ public class PointTests extends BaseGeometryTestCase<Point> {
 
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
-        assertEquals("point (20.0 10.0)", wkt.toWKT(new Point(10, 20)));
-        assertEquals(new Point(10, 20), wkt.fromWKT("point (20.0 10.0)"));
+        assertEquals("point (20.0 10.0)", wkt.toWKT(new Point(20, 10)));
+        assertEquals(new Point(20, 10), wkt.fromWKT("point (20.0 10.0)"));
 
-        assertEquals("point (20.0 10.0 100.0)", wkt.toWKT(new Point(10, 20, 100)));
-        assertEquals(new Point(10, 20, 100), wkt.fromWKT("point (20.0 10.0 100.0)"));
+        assertEquals("point (20.0 10.0 100.0)", wkt.toWKT(new Point(20, 10, 100)));
+        assertEquals(new Point(20, 10, 100), wkt.fromWKT("point (20.0 10.0 100.0)"));
 
         assertEquals("point EMPTY", wkt.toWKT(Point.EMPTY));
         assertEquals(Point.EMPTY, wkt.fromWKT("point EMPTY)"));
@@ -48,16 +48,16 @@ public class PointTests extends BaseGeometryTestCase<Point> {
 
     public void testInitValidation() {
         GeometryValidator validator = new GeographyValidator(true);
-        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(100, 10)));
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(10, 100)));
        assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());
 
-        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(10, 500)));
+        ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Point(500, 10)));
         assertEquals("invalid longitude 500.0; must be between -180.0 and 180.0", ex.getMessage());
 
-        ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(new Point(1, 2, 3)));
+        ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(new Point(2, 1, 3)));
         assertEquals("found Z value [3.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
 
-        new StandardValidator(true).validate(new Point(1, 2, 3));
+        new StandardValidator(true).validate(new Point(2, 1, 3));
     }
 
     public void testWKTValidation() {
@@ -17,12 +17,12 @@
  * under the License.
  */

-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;

 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;

 import java.io.IOException;
 import java.text.ParseException;
@@ -37,22 +37,22 @@ public class PolygonTests extends BaseGeometryTestCase<Polygon> {
     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
         assertEquals("polygon ((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0))",
-            wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))));
-        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})),
+            wkt.toWKT(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}))));
+        assertEquals(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})),
             wkt.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1))"));

         assertEquals("polygon ((3.0 1.0 5.0, 4.0 2.0 4.0, 5.0 3.0 3.0, 3.0 1.0 5.0))",
-            wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5}))));
-        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})),
+            wkt.toWKT(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{5, 4, 3, 5}))));
+        assertEquals(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{5, 4, 3, 5})),
             wkt.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))"));

         // Auto closing in coerce mode
-        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})),
+        assertEquals(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})),
             wkt.fromWKT("polygon ((3 1, 4 2, 5 3))"));
-        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})),
+        assertEquals(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{5, 4, 3, 5})),
             wkt.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3))"));
-        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}),
-            Collections.singletonList(new LinearRing(new double[]{1.5, 1.5, 1.0, 1.5}, new double[]{0.5, 2.5, 2.0, 0.5}))),
+        assertEquals(new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}),
+            Collections.singletonList(new LinearRing(new double[]{0.5, 2.5, 2.0, 0.5}, new double[]{1.5, 1.5, 1.0, 1.5}))),
             wkt.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1), (0.5 1.5, 2.5 1.5, 2.0 1.0))"));

         assertEquals("polygon EMPTY", wkt.toWKT(Polygon.EMPTY));
@@ -61,31 +61,31 @@ public class PolygonTests extends BaseGeometryTestCase<Polygon> {

     public void testInitValidation() {
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-            () -> new Polygon(new LinearRing(new double[]{1, 2, 1}, new double[]{3, 4, 3})));
+            () -> new Polygon(new LinearRing(new double[]{3, 4, 3}, new double[]{1, 2, 1})));
         assertEquals("at least 4 polygon points required", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
-            () -> new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}), null));
+            () -> new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}), null));
         assertEquals("holes must not be null", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
-            () -> new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5}),
-                Collections.singletonList(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))));
+            () -> new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{5, 4, 3, 5}),
+                Collections.singletonList(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}))));
         assertEquals("holes must have the same number of dimensions as the polygon", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}))));
+            new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 2, 3, 1}))));
         assertEquals("found Z value [1.0] but [ignore_z_value] parameter is [false]", ex.getMessage());

         new StandardValidator(true).validate(
-            new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1})));
+            new Polygon(new LinearRing(new double[]{3, 4, 5, 3}, new double[]{1, 2, 3, 1}, new double[]{1, 2, 3, 1})));
     }

     public void testWKTValidation() {
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
             () -> new WellKnownText(false, new GeographyValidator(true)).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3))"));
         assertEquals("first and last points of the linear ring must be the same (it must close itself): " +
-            "lats[0]=1.0 lats[2]=3.0 lons[0]=3.0 lons[2]=5.0 alts[0]=5.0 alts[2]=3.0", ex.getMessage());
+            "x[0]=3.0 x[2]=5.0 y[0]=1.0 y[2]=3.0 z[0]=5.0 z[2]=3.0", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
             () -> new WellKnownText(randomBoolean(), new GeographyValidator(false)).fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))"));
@@ -95,6 +95,6 @@ public class PolygonTests extends BaseGeometryTestCase<Polygon> {
             () -> new WellKnownText(false, new GeographyValidator(randomBoolean())).fromWKT(
                 "polygon ((3 1, 4 2, 5 3, 3 1), (0.5 1.5, 2.5 1.5, 2.0 1.0))"));
         assertEquals("first and last points of the linear ring must be the same (it must close itself): " +
-            "lats[0]=1.5 lats[2]=1.0 lons[0]=0.5 lons[2]=2.0", ex.getMessage());
+            "x[0]=0.5 x[2]=2.0 y[0]=1.5 y[2]=1.0", ex.getMessage());
     }
 }
@@ -17,13 +17,13 @@
  * under the License.
  */

-package org.elasticsearch.geo.geometry;
+package org.elasticsearch.geometry;

 import org.elasticsearch.geo.GeometryTestUtils;
-import org.elasticsearch.geo.utils.GeographyValidator;
-import org.elasticsearch.geo.utils.GeometryValidator;
-import org.elasticsearch.geo.utils.StandardValidator;
-import org.elasticsearch.geo.utils.WellKnownText;
+import org.elasticsearch.geometry.utils.GeographyValidator;
+import org.elasticsearch.geometry.utils.GeometryValidator;
+import org.elasticsearch.geometry.utils.StandardValidator;
+import org.elasticsearch.geometry.utils.WellKnownText;

 import java.io.IOException;
 import java.text.ParseException;
@@ -37,8 +37,8 @@ public class RectangleTests extends BaseGeometryTestCase<Rectangle> {

     public void testBasicSerialization() throws IOException, ParseException {
         WellKnownText wkt = new WellKnownText(true, new GeographyValidator(true));
-        assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", wkt.toWKT(new Rectangle(30, 40, 10, 20)));
-        assertEquals(new Rectangle(30, 40, 10, 20), wkt.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)"));
+        assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", wkt.toWKT(new Rectangle(10, 20, 40, 30)));
+        assertEquals(new Rectangle(10, 20, 40, 30), wkt.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)"));

         assertEquals("bbox EMPTY", wkt.toWKT(Rectangle.EMPTY));
         assertEquals(Rectangle.EMPTY, wkt.fromWKT("bbox EMPTY)"));
@@ -47,25 +47,25 @@ public class RectangleTests extends BaseGeometryTestCase<Rectangle> {
     public void testInitValidation() {
         GeometryValidator validator = new GeographyValidator(true);
         IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Rectangle(1, 100, 2, 3)));
+            () -> validator.validate(new Rectangle(2, 3, 100, 1)));
         assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Rectangle(1, 2, 200, 3)));
+            () -> validator.validate(new Rectangle(200, 3, 2, 1)));
         assertEquals("invalid longitude 200.0; must be between -180.0 and 180.0", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Rectangle(2, 1, 2, 3)));
-        assertEquals("max lat cannot be less than min lat", ex.getMessage());
+            () -> validator.validate(new Rectangle(2, 3, 1, 2)));
+        assertEquals("max y cannot be less than min x", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class,
-            () -> validator.validate(new Rectangle(1, 2, 2, 3, 5, Double.NaN)));
-        assertEquals("only one altitude value is specified", ex.getMessage());
+            () -> validator.validate(new Rectangle(2, 3, 2, 1, 5, Double.NaN)));
+        assertEquals("only one z value is specified", ex.getMessage());

         ex = expectThrows(IllegalArgumentException.class, () -> new StandardValidator(false).validate(
-            new Rectangle(30, 40, 50, 10, 20, 60)));
+            new Rectangle(50, 10, 40, 30, 20, 60)));
         assertEquals("found Z value [20.0] but [ignore_z_value] parameter is [false]", ex.getMessage());

-        new StandardValidator(true).validate(new Rectangle(30, 40, 50, 10, 20, 60));
+        new StandardValidator(true).validate(new Rectangle(50, 10, 40, 30, 20, 60));
     }
 }
@@ -16,14 +16,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.geo.utils;
+package org.elasticsearch.geometry.utils;

 import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.geo.geometry.Rectangle;
+import org.elasticsearch.geometry.Rectangle;
 import org.elasticsearch.test.ESTestCase;

 /**
- * Tests for {@link org.elasticsearch.geo.utils.Geohash}
+ * Tests for {@link Geohash}
  */
 public class GeoHashTests extends ESTestCase {
     public void testGeohashAsLongRoutines() {
@@ -67,9 +67,9 @@ public class GeoHashTests extends ESTestCase {
             // check that the length is as expected
             double expectedLonDiff = 360.0 / (Math.pow(8.0, (level + 1) / 2) * Math.pow(4.0, level / 2));
             double expectedLatDiff = 180.0 / (Math.pow(4.0, (level + 1) / 2) * Math.pow(8.0, level / 2));
-            assertEquals(expectedLonDiff, bbox.getMaxLon() - bbox.getMinLon(), 0.00001);
-            assertEquals(expectedLatDiff, bbox.getMaxLat() - bbox.getMinLat(), 0.00001);
-            assertEquals(hash, Geohash.stringEncode(bbox.getMinLon(), bbox.getMinLat(), level));
+            assertEquals(expectedLonDiff, bbox.getMaxX() - bbox.getMinX(), 0.00001);
+            assertEquals(expectedLatDiff, bbox.getMaxY() - bbox.getMinY(), 0.00001);
+            assertEquals(hash, Geohash.stringEncode(bbox.getMinX(), bbox.getMinY(), level));
         }
     }

     public void testGeohashExtremes() {
@@ -101,7 +101,7 @@ public class GeoHashTests extends ESTestCase {

     public void testNorthPoleBoundingBox() {
         Rectangle bbox = Geohash.toBoundingBox("zzbxfpgzupbx"); // Bounding box with maximum precision touching north pole
-        assertEquals(90.0, bbox.getMaxLat(), 0.0000001); // Should be 90 degrees
+        assertEquals(90.0, bbox.getMaxY(), 0.0000001); // Should be 90 degrees
     }

     public void testInvalidGeohashes() {
@@ -28,6 +28,8 @@ esplugin {
 testClusters.integTest {
   module file(project(':modules:mapper-extras').tasks.bundlePlugin.archiveFile)
   systemProperty 'es.scripting.update.ctx_in_params', 'false'
+  // TODO: remove this once cname is prepended to transport.publish_address by default in 8.0
+  systemProperty 'es.transport.cname_in_publish_address', 'true'
 }

 dependencies {
@@ -1,16 +1,19 @@
 {
   "scripts_painless_context": {
     "stability": "experimental",
-    "methods": ["GET"],
     "url": {
-      "paths": ["/_scripts/painless/_context"],
-      "parts": {
-      },
-      "params": {
-        "context" : {
-          "type" : "string",
-          "description" : "Select a specific context to retrieve API information about"
-        }
+      "paths": [
+        {
+          "path": "/_scripts/painless/_context",
+          "methods": ["GET"],
+          "parts": {}
+        }
+      ]
+    },
+    "params": {
+      "context" : {
+        "type" : "string",
+        "description" : "Select a specific context to retrieve API information about"
       }
     }
   }
@@ -1,27 +1,33 @@
 {
   "cat.example": {
-    "documentation": "",
+    "documentation": {
+      "url": "https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugin-authors.html",
+      "description": "Example"
+    },
     "stability" : "stable",
-    "methods": ["GET"],
     "url": {
-      "paths": ["/_cat/example"],
-      "parts": {},
-      "params": {
-        "help": {
-          "type": "boolean",
-          "description": "Return help information",
-          "default": false
-        },
-        "v": {
-          "type": "boolean",
-          "description": "Verbose mode. Display column headers",
-          "default": true
-        },
-        "message": {
-          "type": "string",
-          "description": "A simple message that will be printed out in the response",
-          "default": "Hello from Cat Example action"
-        }
-      }
+      "paths": [
+        {
+          "path" : "/_cat/example",
+          "methods" : ["GET"]
+        }
+      ]
+    },
+    "params": {
+      "help": {
+        "type": "boolean",
+        "description": "Return help information",
+        "default": false
+      },
+      "v": {
+        "type": "boolean",
+        "description": "Verbose mode. Display column headers",
+        "default": true
+      },
+      "message": {
+        "type": "string",
+        "description": "A simple message that will be printed out in the response",
+        "default": "Hello from Cat Example action"
+      }
     },
     "body": null
@@ -23,10 +23,10 @@ esplugin {
 }

 versions << [
-  'tika': '1.19.1',
-  'pdfbox': '2.0.12',
-  'poi': '4.0.0',
-  'mime4j': '0.8.2'
+  'tika': '1.22',
+  'pdfbox': '2.0.16',
+  'poi': '4.0.1',
+  'mime4j': '0.8.3'
 ]

 dependencies {
@@ -66,6 +66,8 @@ dependencies {
   // Outlook documents
   compile "org.apache.james:apache-mime4j-core:${versions.mime4j}"
   compile "org.apache.james:apache-mime4j-dom:${versions.mime4j}"
+  // EPUB books
+  compile 'org.apache.commons:commons-lang3:3.9'
 }

 dependencyLicenses {
@@ -1 +0,0 @@
-94919d81969c67c5894646338bf10fbc35f5a946
@@ -0,0 +1 @@
+1179b56c9919c1a8e20d3a528ee4c6cee19bcbe0
@@ -1 +0,0 @@
-32c9a9afe84eca86a3b0b3c66a956ced249ceade
@@ -0,0 +1 @@
+e80733714eb6a70895bfc74a9528c658504c2c83
@@ -0,0 +1 @@
+0122c7cee69b53ed4a7681c03d4ee4c0e2765da5
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
@@ -0,0 +1,5 @@
+Apache Commons Lang
+Copyright 2001-2019 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
@@ -1 +0,0 @@
-566fd1d6b25012bb82078da08b82e6d0ba8c884a
@@ -0,0 +1 @@
+3f7819279a0b90a01b07a870d1d27dffd8de24db
@@ -1 +0,0 @@
-a7311cd267c19e1ba8154b076a63d29537154784
@@ -0,0 +1 @@
+5dce5e41fc472d02800df5ef060a1f3a58c36902
@@ -1 +0,0 @@
-7ddb9b983ed682c93a986e8bb596d5935b13086c
@@ -0,0 +1 @@
+d8eaa341687a7dc48048d964d0d53238959ca9b5
@@ -1 +0,0 @@
-f3fa9c2bd64eb3ec15378de960a07d077ae5b26d
@@ -0,0 +1 @@
+9ec84728bf4236b8f9ec7fef3fe1b705eef2b408
@@ -1 +0,0 @@
-125f9ccd2cf652fa4169b1c30e9023362e23324f
@@ -0,0 +1 @@
+d2a066340008d36cb289b71f0f7b6ad562940644
@@ -1 +0,0 @@
-1038d3bb1ec34e93c184b4c5b690e2f51c6f7a60
@@ -0,0 +1 @@
+89b1ce1b932338204ffa3fab225b65b5d33dab5d
@@ -1 +0,0 @@
-c1f075aa01586c2c28a249ad60bcfb733b69b866
@@ -0,0 +1 @@
+b193f1f977e64ff77025a4cecd7997cff344c4bc
Some files were not shown because too many files have changed in this diff.