Merge branch 'feature/ingest' into ingest/date
commit f657a7dbf3
@@ -8,6 +8,7 @@ work/
logs/
.DS_Store
build/
generated-resources/
**/.local*
docs/html/
docs/build.log
@@ -209,9 +209,13 @@ The distribution for each project will be created under the @target/releases@ di
See the "TESTING":TESTING.asciidoc file for more information about
running the Elasticsearch test suite.

h3. Upgrading to Elasticsearch 1.x?
h3. Upgrading from Elasticsearch 1.x?

In order to ensure a smooth upgrade process from earlier versions of Elasticsearch (< 1.0.0), it is recommended to perform a full cluster restart. Please see the "setup reference":https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html for more details on the upgrade process.
In order to ensure a smooth upgrade process from earlier versions of
Elasticsearch (1.x), it is required to perform a full cluster restart. Please
see the "setup reference":
https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html
for more details on the upgrade process.

h1. License
155  TESTING.asciidoc
@@ -326,152 +326,63 @@ vagrant plugin install vagrant-cachier
. Validate your installed dependencies:

-------------------------------------
gradle :qa:vagrant:validate
gradle :qa:vagrant:checkVagrantVersion
-------------------------------------

. Download the VMs. Since Maven or ant or something eats the progress reports
from Vagrant when you run it inside mvn it's probably best if you run this one
time to set up all the VMs one at a time. Run this to download and set up the VMs
we use for testing by default:
. Download and smoke test the VMs with `gradle vagrantSmokeTest` or
`gradle vagrantSmokeTestAllDistros`. The first time you run this it will
download the base images and provision the boxes and immediately quit. If you
run this again it'll skip the download step.

--------------------------------------------------------
vagrant up --provision trusty --provider virtualbox && vagrant halt trusty
vagrant up --provision centos-7 --provider virtualbox && vagrant halt centos-7
--------------------------------------------------------
. Run the tests with `gradle checkPackages`. This will cause gradle to build
the tar, zip, and deb packages and all the plugins. It will then run the tests
on ubuntu-1404 and centos-7. We chose those two distributions as the default
because they cover deb and rpm packaging and SysVinit and systemd.

or run this to download and set up all the VMs:
You can run on all the VMs by running `gradle checkPackagesAllDistros`. You can
run a particular VM with a command like `gradle checkOel7`. See `gradle tasks`
for a list. It's important to know that if you ctrl-c any of these `gradle`
commands then the boxes will remain running and you'll have to terminate them
with `vagrant halt`.

-------------------------------------------------------------------------------
vagrant halt
for box in $(vagrant status | grep 'poweroff\|not created' | cut -f1 -d' '); do
  vagrant up --provision $box --provider virtualbox
  vagrant halt $box
done
-------------------------------------------------------------------------------

. Smoke test the maven/ant dance that we use to get vagrant involved in
integration testing is working:

---------------------------------------------
mvn -Dtests.vagrant -Psmoke-vms -pl qa/vagrant verify
---------------------------------------------

or this to validate all the VMs:

-------------------------------------------------
mvn -Dtests.vagrant=all -Psmoke-vms -pl qa/vagrant verify
-------------------------------------------------

That will start up the VMs and then immediately quit.

. Finally run the tests. The fastest way to get this started is to run:

-----------------------------------
mvn clean install -DskipTests
mvn -Dtests.vagrant -pl qa/vagrant verify
-----------------------------------

You could just run:

--------------------
mvn -Dtests.vagrant verify
--------------------

but that will run all the tests. Which is probably a good thing, but not always
what you want.

Whichever snippet you run, mvn will build the tar, zip and deb packages. If you
have rpmbuild installed it'll build the rpm package as well. Then mvn will
spin up trusty and verify the tar, zip, and deb package. If you have rpmbuild
installed it'll spin up centos-7 and verify the tar, zip and rpm packages. We
chose those two distributions as the default because they cover deb and rpm
packaging and SysVinit and systemd.

You can control the boxes that are used for testing like so. Run just
fedora-22 with:

--------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest=fedora-22
--------------------------------------------

or run jessie and trusty:

------------------------------------------------------------------
mvn -Dtests.vagrant -pl qa/vagrant verify -DboxesToTest='jessie, trusty'
------------------------------------------------------------------

or run all the boxes:

---------------------------------------
mvn -Dtests.vagrant=all -pl qa/vagrant verify
---------------------------------------

If you want to run a specific test on several boxes you can do:

---------------------------------------
mvn -Dtests.vagrant=all -pl qa/vagrant verify -DtestScripts=*tar*.bats
---------------------------------------

It's important to know that if you ctrl-c any of these `mvn` runs you'll
probably leave a VM up. You can terminate it by running:

------------
vagrant halt
------------

This is just regular vagrant so you can run normal multi box vagrant commands
to test things manually. Just run:

---------------------------------------
vagrant up trusty --provider virtualbox && vagrant ssh trusty
---------------------------------------

to get an Ubuntu or

-------------------------------------------
vagrant up centos-7 --provider virtualbox && vagrant ssh centos-7
-------------------------------------------

to get a CentOS. Once you are done with them you should halt them:

-------------------
vagrant halt trusty
-------------------
All the regular vagrant commands should just work so you can get a shell in a
VM running trusty by running
`vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404`.

These are the linux flavors the Vagrantfile currently supports:

* precise aka Ubuntu 12.04
* trusty aka Ubuntu 14.04
* vivid aka Ubuntu 15.04
* jessie aka Debian 8, the current debian stable distribution
* ubuntu-1204 aka precise
* ubuntu-1404 aka trusty
* ubuntu-1504 aka vivid
* debian-8 aka jessie, the current debian stable distribution
* centos-6
* centos-7
* fedora-22
* oel-7 aka Oracle Enterprise Linux 7
* sles-12
* opensuse-13

We're missing the following from the support matrix because there aren't high
quality boxes available in vagrant atlas:

* sles-11
* opensuse-13
* oel-6

We're missing the following because our tests are very linux/bash centric:

* Windows Server 2012

It's important to think of VMs like cattle: if they become lame you just shoot
It's important to think of VMs like cattle. If they become lame you just shoot
them and let vagrant reprovision them. Say you've hosed your precise VM:

----------------------------------------------------
vagrant ssh precise -c 'sudo rm -rf /bin'; echo oops
vagrant ssh ubuntu-1404 -c 'sudo rm -rf /bin'; echo oops
----------------------------------------------------

All you've got to do to get another one is

----------------------------------------------
vagrant destroy -f trusty && vagrant up trusty --provider virtualbox
vagrant destroy -f ubuntu-1404 && vagrant up ubuntu-1404 --provider virtualbox
----------------------------------------------

The whole process takes a minute and a half on a modern laptop, two and a half
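For orientation, here is a minimal Gradle sketch of how per-box check tasks like `checkOel7` could be generated from the box list documented above. The real `qa/vagrant` build script is not part of this diff, so the wiring below is an assumption and purely illustrative.

---------------------------------------
// Hypothetical sketch only: generate one task per documented Vagrant box.
// The real build attaches the packaging tests; here each task just brings the
// box up so the naming convention (oel-7 -> checkOel7) is visible.
def boxes = ['ubuntu-1204', 'ubuntu-1404', 'ubuntu-1504', 'debian-8',
             'centos-6', 'centos-7', 'fedora-22', 'oel-7', 'sles-12', 'opensuse-13']
boxes.each { box ->
  // e.g. 'oel-7' -> 'Oel7'
  String suffix = box.split('-').collect { it.capitalize() }.join('')
  tasks.create(name: "check${suffix}", type: Exec) {
    group = 'verification'
    description = "Runs the packaging tests inside the ${box} VM"
    commandLine 'vagrant', 'up', box, '--provider', 'virtualbox'
  }
}
---------------------------------------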
@@ -489,13 +400,8 @@ vagrant halt
vagrant destroy -f
------------------

----------
vagrant up
----------

would normally start all the VMs but we've prevented that because that'd
consume a ton of ram.
`vagrant up` would normally start all the VMs but we've prevented that because
that'd consume a ton of ram.

== Testing scripts more directly
@@ -504,7 +410,7 @@ destructive. When working with a single package its generally faster to run its
tests in a tighter loop than maven provides. In one window:

--------------------------------
mvn -pl distribution/rpm package
gradle :distribution:rpm:assemble
--------------------------------

and in another window:
@@ -518,10 +424,7 @@ sudo bats $BATS/*rpm*.bats
If you wanted to retest all the release artifacts on a single VM you could:

-------------------------------------------------
# Build all the distributions fresh but skip recompiling elasticsearch:
mvn -amd -pl distribution install -DskipTests
# Copy them all to the testroot
mvn -Dtests.vagrant -pl qa/vagrant pre-integration-test
gradle copyDepsToTestRoot
vagrant up trusty --provider virtualbox && vagrant ssh trusty
cd $TESTROOT
sudo bats $BATS/*.bats
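The same flow can be driven from Gradle. The following Exec task is an assumed sketch (the task name and wiring are invented here; only `$TESTROOT` and `$BATS` come from the snippet above) that runs the bats suite inside an already-provisioned box over `vagrant ssh`:

---------------------------------------
// Assumed sketch: run the packaging bats tests inside the ubuntu-1404 box.
// $TESTROOT and $BATS are the environment variables referenced above; the
// single-quoted Groovy string leaves them for the remote shell to expand.
tasks.create(name: 'batsUbuntu1404', type: Exec) {
  description = 'Runs the bats packaging tests inside the ubuntu-1404 VM'
  commandLine 'vagrant', 'ssh', 'ubuntu-1404', '-c',
              'cd $TESTROOT && sudo bats $BATS/*.bats'
}
---------------------------------------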
@@ -22,15 +22,15 @@
# under the License.

Vagrant.configure(2) do |config|
  config.vm.define "precise" do |config|
  config.vm.define "ubuntu-1204" do |config|
    config.vm.box = "ubuntu/precise64"
    ubuntu_common config
  end
  config.vm.define "trusty" do |config|
  config.vm.define "ubuntu-1404" do |config|
    config.vm.box = "ubuntu/trusty64"
    ubuntu_common config
  end
  config.vm.define "vivid" do |config|
  config.vm.define "ubuntu-1504" do |config|
    config.vm.box = "ubuntu/vivid64"
    ubuntu_common config, extra: <<-SHELL
      # Install Jayatana so we can work around it being present.
@@ -40,7 +40,7 @@ Vagrant.configure(2) do |config|
  # Wheezy's backports don't contain Openjdk 8 and the backflips required to
  # get the sun jdk on there just aren't worth it. We have jessie for testing
  # debian and it works fine.
  config.vm.define "jessie" do |config|
  config.vm.define "debian-8" do |config|
    config.vm.box = "debian/jessie64"
    deb_common config,
      'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
@@ -137,7 +137,7 @@ def deb_common(config, add_openjdk_repository_command, openjdk_list, extra: '')
    extra: <<-SHELL
      export DEBIAN_FRONTEND=noninteractive
      ls /etc/apt/sources.list.d/#{openjdk_list}.list > /dev/null 2>&1 ||
        (echo "Importing java-8 ppa" &&
        (echo "==> Importing java-8 ppa" &&
          #{add_openjdk_repository_command} &&
          apt-get update)
      #{extra}
@@ -223,9 +223,11 @@ def provision(config,
    install() {
      # Only apt-get update if we haven't in the last day
      if [ ! -f #{update_tracking_file} ] || [ "x$(find #{update_tracking_file} -mtime +0)" == "x#{update_tracking_file}" ]; then
        #{update_command} || true
        touch #{update_tracking_file}
        echo "==> Updating repository"
        #{update_command} || true
        touch #{update_tracking_file}
      fi
      echo "==> Installing $1"
      #{install_command} $1
    }
    ensure() {
@@ -242,17 +244,18 @@ def provision(config,
    installed bats || {
      # Bats lives in a git repository....
      ensure git
      echo "==> Installing bats"
      git clone https://github.com/sstephenson/bats /tmp/bats
      # Centos doesn't add /usr/local/bin to the path....
      /tmp/bats/install.sh /usr
      rm -rf /tmp/bats
    }
    cat \<\<VARS > /etc/profile.d/elasticsearch_vars.sh
export ZIP=/elasticsearch/distribution/zip/target/releases
export TAR=/elasticsearch/distribution/tar/target/releases
export RPM=/elasticsearch/distribution/rpm/target/releases
export DEB=/elasticsearch/distribution/deb/target/releases
export TESTROOT=/elasticsearch/qa/vagrant/target/testroot
export ZIP=/elasticsearch/distribution/zip/build/releases
export TAR=/elasticsearch/distribution/tar/build/releases
export RPM=/elasticsearch/distribution/rpm/build/releases
export DEB=/elasticsearch/distribution/deb/build/releases
export TESTROOT=/elasticsearch/qa/vagrant/build/testroot
export BATS=/elasticsearch/qa/vagrant/src/test/resources/packaging/scripts
VARS
  SHELL
19  build.gradle

@@ -97,6 +97,9 @@ allprojects {
      randomizedrunner: '2.2.0',
      httpclient: '4.3.6'
    ]

    // for eclipse hacks...
    isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
  }
}
@@ -135,13 +138,9 @@ subprojects {
      dependencySubstitution {
        substitute module("org.elasticsearch:rest-api-spec:${version}") with project("${projectsPrefix}:rest-api-spec")
        substitute module("org.elasticsearch:elasticsearch:${version}") with project("${projectsPrefix}:core")
        // so that eclipse doesn't have circular references
        // the downside is, if you hack on test-framework, you have to gradle install
        // the first prop detects eclipse itself, the second detects eclipse from commandline
        if (System.getProperty("eclipse.launcher") == null && gradle.startParameter.taskNames.contains('eclipse') == false) {
          substitute module("org.elasticsearch:test-framework:${version}") with project("${projectsPrefix}:test-framework")
        }
        substitute module("org.elasticsearch:test-framework:${version}") with project("${projectsPrefix}:test-framework")
        substitute module("org.elasticsearch.distribution.zip:elasticsearch:${version}") with project("${projectsPrefix}:distribution:zip")
        substitute module("org.elasticsearch.distribution.tar:elasticsearch:${version}") with project("${projectsPrefix}:distribution:tar")
      }
    }
  }
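For readers less familiar with this Gradle feature: the block above relies on dependency substitution, which rewires a published module coordinate to a local project at resolution time. A minimal, generic sketch follows (not this build's exact configuration; the enclosing `configurations.all`/`resolutionStrategy` wrapper is assumed, since the hunk above starts inside it):

---------------------------------------
// Generic sketch of Gradle dependency substitution: whenever something pulls
// the published artifact, resolve it against the local project instead.
configurations.all {
  resolutionStrategy.dependencySubstitution {
    substitute module("org.elasticsearch:elasticsearch") with project(":core")
  }
}
---------------------------------------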
@@ -153,7 +152,7 @@ allprojects {
  apply plugin: 'idea'
}

if (hasProperty('projectsPrefix') == false) {
if (projectsPrefix.isEmpty()) {
  idea {
    project {
      languageLevel = sourceCompatibility
@@ -193,4 +192,10 @@ allprojects {
  tasks.eclipse.dependsOn(copyEclipseSettings)
}

// add buildSrc itself as a groovy project
task buildSrcEclipse(type: GradleBuild) {
  buildFile = 'buildSrc/build.gradle'
  tasks = ['cleanEclipse', 'eclipse']
}
tasks.eclipse.dependsOn(buildSrcEclipse)
@@ -63,4 +63,3 @@ eclipse {
    defaultOutputDir = new File(file('build'), 'eclipse')
  }
}
@@ -61,6 +61,11 @@ class RandomizedTestingTask extends DefaultTask {
  ListenersConfiguration listenersConfig = new ListenersConfiguration(task: this)

  List<String> jvmArgs = new ArrayList<>()

  @Optional
  @Input
  String argLine = null

  Map<String, String> systemProperties = new HashMap<>()
  PatternFilterable patternSet = new PatternSet()
@@ -193,6 +198,9 @@ class RandomizedTestingTask extends DefaultTask {
      for (String arg : jvmArgs) {
        jvmarg(value: arg)
      }
      if (argLine != null) {
        jvmarg(line: argLine)
      }
      fileset(dir: testClassesDir) {
        for (String includePattern : patternSet.getIncludes()) {
          include(name: includePattern)
@@ -25,7 +25,7 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
  static final String FAILURE_MARKER = " <<< FAILURES!"

  /** Status names column. */
  static EnumMap<TestStatus, String> statusNames;
  static EnumMap<? extends TestStatus, String> statusNames;
  static {
    statusNames = new EnumMap<>(TestStatus.class);
    for (TestStatus s : TestStatus.values()) {
@@ -59,6 +59,11 @@ class BuildPlugin implements Plugin<Project> {
      throw new GradleException('Gradle 2.6 or above is required to build elasticsearch')
    }

    // enforce Java version
    if (!JavaVersion.current().isJava8Compatible()) {
      throw new GradleException('Java 8 or above is required to build Elasticsearch')
    }

    // Build debugging info
    println '======================================='
    println 'Elasticsearch Build Hamster says Hello!'
@@ -111,6 +116,7 @@ class BuildPlugin implements Plugin<Project> {
      File heapdumpDir = new File(project.buildDir, 'heapdump')
      heapdumpDir.mkdirs()
      jvmArg '-XX:HeapDumpPath=' + heapdumpDir
      argLine System.getProperty('tests.jvm.argline')

      // we use './temp' since this is per JVM and tests are forbidden from writing to CWD
      systemProperty 'java.io.tmpdir', './temp'
@@ -49,7 +49,7 @@ class PluginBuildPlugin extends BuildPlugin {
      project.integTest.configure {
        dependsOn project.bundlePlugin
        cluster {
          plugin 'installPlugin', project.bundlePlugin.outputs.files
          plugin project.name, project.bundlePlugin.outputs.files
        }
      }
    }
@@ -84,9 +84,11 @@ class PluginBuildPlugin extends BuildPlugin {
  static Task configureBundleTask(Project project) {
    PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask)
    File pluginMetadata = project.file("src/main/plugin-metadata")
    project.processTestResources {
      from buildProperties
      from pluginMetadata
    project.sourceSets.test {
      output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
      resources {
        srcDir pluginMetadata
      }
    }
    Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties])
    bundle.configure {
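The `output.dir(..., builtBy: ...)` call above is the standard Gradle idiom for putting a directory of generated resources on a source set's classpath and tying it to the task that produces it. A standalone, generic sketch (the task and file names here are made up, and it assumes the `java` plugin is applied):

---------------------------------------
apply plugin: 'java'

// Made-up task that writes a generated resource file.
task generateBuildInfo {
  File outDir = new File(projectDir, 'generated-resources')
  outputs.dir outDir
  doLast {
    outDir.mkdirs()
    new File(outDir, 'build-info.properties').text = "version=${project.version}\n"
  }
}

// Register the directory on the test output and declare which task builds it,
// so tests see it on the classpath and Gradle runs the generator first.
sourceSets.test {
  output.dir(new File(projectDir, 'generated-resources'), builtBy: 'generateBuildInfo')
}
---------------------------------------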
|
@ -19,77 +19,65 @@
|
|||
package org.elasticsearch.gradle.plugin
|
||||
|
||||
import org.elasticsearch.gradle.ElasticsearchProperties
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.tasks.OutputFile
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.tasks.Copy
|
||||
|
||||
/**
|
||||
* Creates a plugin descriptor.
|
||||
*
|
||||
* TODO: copy the example properties file to plugin documentation
|
||||
*/
|
||||
class PluginPropertiesTask extends DefaultTask {
|
||||
class PluginPropertiesTask extends Copy {
|
||||
|
||||
PluginPropertiesExtension extension
|
||||
Map<String, String> properties = new HashMap<>()
|
||||
File generatedResourcesDir = new File(project.projectDir, 'generated-resources')
|
||||
|
||||
PluginPropertiesTask() {
|
||||
File templateFile = new File(project.buildDir, 'templates/plugin-descriptor.properties')
|
||||
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
|
||||
doLast {
|
||||
InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream('/plugin-descriptor.properties')
|
||||
templateFile.parentFile.mkdirs()
|
||||
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
|
||||
}
|
||||
}
|
||||
dependsOn(copyPluginPropertiesTemplate)
|
||||
extension = project.extensions.create('esplugin', PluginPropertiesExtension, project)
|
||||
project.clean.delete(generatedResourcesDir)
|
||||
project.afterEvaluate {
|
||||
// check required properties are set
|
||||
if (extension.name == null) {
|
||||
throw new InvalidUserDataException('name is a required setting for esplugin')
|
||||
}
|
||||
if (extension.description == null) {
|
||||
throw new InvalidUserDataException('description is a required setting for esplugin')
|
||||
}
|
||||
if (extension.jvm && extension.classname == null) {
|
||||
throw new InvalidUserDataException('classname is a required setting for esplugin with jvm=true')
|
||||
}
|
||||
if (extension.jvm) {
|
||||
dependsOn(project.classes) // so we can check for the classname
|
||||
}
|
||||
fillProperties()
|
||||
configure {
|
||||
inputs.properties(properties)
|
||||
doFirst {
|
||||
if (extension.jvm && extension.isolated == false) {
|
||||
String warning = "WARNING: Disabling plugin isolation in ${project.path} is deprecated and will be removed in the future"
|
||||
logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}")
|
||||
}
|
||||
}
|
||||
// configure property substitution
|
||||
from(templateFile)
|
||||
into(generatedResourcesDir)
|
||||
expand(generateSubstitutions())
|
||||
}
|
||||
}
|
||||
|
||||
@OutputFile
|
||||
File propertiesFile = new File(project.buildDir, "plugin" + File.separator + "plugin-descriptor.properties")
|
||||
|
||||
void fillProperties() {
|
||||
// TODO: need to copy the templated plugin-descriptor with a dependent task, since copy requires a file (not uri)
|
||||
properties = [
|
||||
Map generateSubstitutions() {
|
||||
return [
|
||||
'name': extension.name,
|
||||
'description': extension.description,
|
||||
'version': extension.version,
|
||||
'elasticsearch.version': ElasticsearchProperties.version,
|
||||
'elasticsearchVersion': ElasticsearchProperties.version,
|
||||
'javaVersion': project.targetCompatibility as String,
|
||||
'jvm': extension.jvm as String,
|
||||
'site': extension.site as String
|
||||
'site': extension.site as String,
|
||||
'isolated': extension.isolated as String,
|
||||
'classname': extension.jvm ? extension.classname : 'NA'
|
||||
]
|
||||
if (extension.jvm) {
|
||||
properties['classname'] = extension.classname
|
||||
properties['isolated'] = extension.isolated as String
|
||||
properties['java.version'] = project.targetCompatibility as String
|
||||
}
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
void buildProperties() {
|
||||
if (extension.jvm) {
|
||||
File classesDir = project.sourceSets.main.output.classesDir
|
||||
File classFile = new File(classesDir, extension.classname.replace('.', File.separator) + '.class')
|
||||
if (classFile.exists() == false) {
|
||||
throw new InvalidUserDataException('classname ' + extension.classname + ' does not exist')
|
||||
}
|
||||
if (extension.isolated == false) {
|
||||
logger.warn('Disabling isolation is deprecated and will be removed in the future')
|
||||
}
|
||||
}
|
||||
|
||||
Properties props = new Properties()
|
||||
for (Map.Entry<String, String> prop : properties) {
|
||||
props.put(prop.getKey(), prop.getValue())
|
||||
}
|
||||
props.store(propertiesFile.newWriter(), null)
|
||||
}
|
||||
}
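A generic, standalone sketch of the Copy-plus-expand idiom the reworked task above is built on (the file names and values below are hypothetical): Gradle's `Copy` task runs each file through a template engine, so `${...}` placeholders in the template are filled from the map passed to `expand()`.

---------------------------------------
// Hypothetical template copy: expand ${name}, ${description}, ${version}
// placeholders in a properties template into a generated-resources directory.
task expandPluginDescriptor(type: Copy) {
  from 'src/main/templates/plugin-descriptor.properties'
  into new File(projectDir, 'generated-resources')
  expand(
    name: 'example-plugin',
    description: 'An example plugin descriptor',
    version: project.version
  )
}
---------------------------------------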
|
||||
|
|
|
@@ -24,6 +24,9 @@ import org.gradle.api.tasks.Input
/** Configuration for an elasticsearch cluster, used for integration tests. */
class ClusterConfiguration {

  @Input
  String distribution = 'zip'

  @Input
  int numNodes = 1
@@ -33,27 +36,23 @@ class ClusterConfiguration {
  @Input
  int transportPort = 9500

  @Input
  String jvmArgs = System.getProperty('tests.jvm.argline', '')

  Map<String, String> systemProperties = new HashMap<>()

  @Input
  void systemProperty(String property, String value) {
    systemProperties.put(property, value)
  }
  LinkedHashMap<String, FileCollection> plugins = new LinkedHashMap<>()

  LinkedHashMap<String, Object[]> setupCommands = new LinkedHashMap<>()

  @Input
  void plugin(String name, FileCollection file) {
    setupCommands.put(name, ['bin/plugin', 'install', new LazyFileUri(file: file)])
    plugins.put(name, file)
  }

  static class LazyFileUri {
    FileCollection file
    @Override
    String toString() {
      return file.singleFile.toURI().toURL().toString();
    }

  @Input
  void systemProperty(String property, String value) {
    systemProperties.put(property, value)
  }

  @Input
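Taken together, the fields and methods above form the `cluster { ... }` DSL that integration-test builds configure. The following is a hypothetical usage example only; it assumes a build where the `integTest` task and this extension are already wired up, so it is not runnable on its own, and only the `plugin project.name, project.bundlePlugin.outputs.files` line is taken from this diff.

---------------------------------------
// Hypothetical cluster configuration for an integration test.
integTest {
  cluster {
    distribution = 'zip'
    numNodes = 1
    httpPort = 9400
    systemProperty 'es.logger.level', 'DEBUG'   // illustrative property
    plugin project.name, project.bundlePlugin.outputs.files
  }
}
---------------------------------------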
|
@@ -22,8 +22,10 @@ import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.ElasticsearchProperties
import org.gradle.api.DefaultTask
import org.gradle.api.GradleException
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Delete
import org.gradle.api.tasks.Exec
@ -34,83 +36,193 @@ import org.gradle.api.tasks.Exec
|
|||
class ClusterFormationTasks {
|
||||
|
||||
/**
|
||||
* Adds dependent tasks to the given task to start a cluster with the given configuration.
|
||||
* Also adds a finalize task to stop the cluster.
|
||||
* Adds dependent tasks to the given task to start and stop a cluster with the given configuration.
|
||||
*/
|
||||
static void setup(Project project, Task task, ClusterConfiguration config) {
|
||||
if (task.getEnabled() == false) {
|
||||
// no need to cluster formation if the task won't run!
|
||||
// no need to add cluster formation tasks if the task won't run!
|
||||
return
|
||||
}
|
||||
addZipConfiguration(project)
|
||||
File clusterDir = new File(project.buildDir, 'cluster' + File.separator + task.name)
|
||||
if (config.numNodes == 1) {
|
||||
addNodeStartupTasks(project, task, config, clusterDir)
|
||||
addNodeStopTask(project, task, clusterDir)
|
||||
} else {
|
||||
for (int i = 0; i < config.numNodes; ++i) {
|
||||
File nodeDir = new File(clusterDir, "node${i}")
|
||||
addNodeStartupTasks(project, task, config, nodeDir)
|
||||
addNodeStopTask(project, task, nodeDir)
|
||||
configureDistributionDependency(project, config.distribution)
|
||||
for (int i = 0; i < config.numNodes; ++i) {
|
||||
File nodeDir = new File(project.buildDir, "cluster/${task.name} node${i}")
|
||||
configureTasks(project, task, config, nodeDir)
|
||||
}
|
||||
}
|
||||
|
||||
/** Adds a dependency on the given distribution */
|
||||
static void configureDistributionDependency(Project project, String distro) {
|
||||
String elasticsearchVersion = ElasticsearchProperties.version
|
||||
String packaging = distro == 'tar' ? 'tar.gz' : distro
|
||||
project.configurations {
|
||||
elasticsearchDistro
|
||||
}
|
||||
project.dependencies {
|
||||
elasticsearchDistro "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds dependent tasks to start an elasticsearch cluster before the given task is executed,
|
||||
* and stop it after it has finished executing.
|
||||
*
|
||||
* The setup of the cluster involves the following:
|
||||
* <ol>
|
||||
* <li>Cleanup the extraction directory</li>
|
||||
* <li>Extract a fresh copy of elasticsearch</li>
|
||||
* <li>Write an elasticsearch.yml config file</li>
|
||||
* <li>Copy plugins that will be installed to a temporary dir (which contains spaces)</li>
|
||||
* <li>Install plugins</li>
|
||||
* <li>Run additional setup commands</li>
|
||||
* <li>Start elasticsearch<li>
|
||||
* </ol>
|
||||
*/
|
||||
static void configureTasks(Project project, Task task, ClusterConfiguration config, File baseDir) {
|
||||
String clusterName = "${task.path.replace(':', '_').substring(1)}"
|
||||
File pidFile = pidFile(baseDir)
|
||||
File home = homeDir(baseDir, config.distribution)
|
||||
File cwd = new File(baseDir, "cwd")
|
||||
File pluginsTmpDir = new File(baseDir, "plugins tmp")
|
||||
|
||||
// tasks are chained so their execution order is maintained
|
||||
Task setup = project.tasks.create(name: "${task.name}#clean", type: Delete, dependsOn: task.dependsOn.collect()) {
|
||||
delete home
|
||||
doLast {
|
||||
cwd.mkdirs()
|
||||
}
|
||||
}
|
||||
setup = configureCheckPreviousTask("${task.name}#checkPrevious", project, setup, pidFile)
|
||||
setup = configureStopTask("${task.name}#stopPrevious", project, setup, pidFile)
|
||||
setup = configureExtractTask("${task.name}#extract", project, setup, baseDir, config.distribution)
|
||||
setup = configureWriteConfigTask("${task.name}#configure", project, setup, home, config, clusterName, pidFile)
|
||||
setup = configureCopyPluginsTask("${task.name}#copyPlugins", project, setup, pluginsTmpDir, config)
|
||||
|
||||
// install plugins
|
||||
for (Map.Entry<String, FileCollection> plugin : config.plugins.entrySet()) {
|
||||
// replace every dash followed by a character with just the uppercase character
|
||||
String camelName = plugin.getKey().replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) }
|
||||
String taskName = "${task.name}#install${camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1)}"
|
||||
// delay reading the file location until execution time by wrapping in a closure within a GString
|
||||
String file = "${ -> new File(pluginsTmpDir, plugin.getValue().singleFile.getName()).toURI().toURL().toString() }"
|
||||
Object[] args = [new File(home, 'bin/plugin'), 'install', file]
|
||||
setup = configureExecTask(taskName, project, setup, cwd, args)
|
||||
}
|
||||
|
||||
// extra setup commands
|
||||
for (Map.Entry<String, Object[]> command : config.setupCommands.entrySet()) {
|
||||
setup = configureExecTask("${task.name}#${command.getKey()}", project, setup, cwd, command.getValue())
|
||||
}
|
||||
|
||||
Task start = configureStartTask("${task.name}#start", project, setup, cwd, config, clusterName, pidFile, home)
|
||||
task.dependsOn(start)
|
||||
|
||||
Task stop = configureStopTask("${task.name}#stop", project, [], pidFile)
|
||||
task.finalizedBy(stop)
|
||||
}
|
||||
|
||||
/** Adds a task to extract the elasticsearch distribution */
|
||||
static Task configureExtractTask(String name, Project project, Task setup, File baseDir, String distro) {
|
||||
List extractDependsOn = [project.configurations.elasticsearchDistro, setup]
|
||||
Task extract
|
||||
switch (distro) {
|
||||
case 'zip':
|
||||
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
|
||||
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
|
||||
into baseDir
|
||||
}
|
||||
break;
|
||||
case 'tar':
|
||||
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
|
||||
from {
|
||||
project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile))
|
||||
}
|
||||
into baseDir
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new InvalidUserDataException("Unknown distribution: ${distro}")
|
||||
}
|
||||
return extract
|
||||
}
|
||||
|
||||
/** Adds a task to write elasticsearch.yml for the given node configuration */
|
||||
static Task configureWriteConfigTask(String name, Project project, Task setup, File home, ClusterConfiguration config, String clusterName, File pidFile) {
|
||||
Map esConfig = [
|
||||
'cluster.name' : clusterName,
|
||||
'http.port' : config.httpPort,
|
||||
'transport.tcp.port' : config.transportPort,
|
||||
'pidfile' : pidFile,
|
||||
// TODO: make this work for multi node!
|
||||
'discovery.zen.ping.unicast.hosts': "localhost:${config.transportPort}",
|
||||
'path.repo' : "${home}/repo",
|
||||
'path.shared_data' : "${home}/../",
|
||||
// Define a node attribute so we can test that it exists
|
||||
'node.testattr' : 'test',
|
||||
'repositories.url.allowed_urls' : 'http://snapshot.test*'
|
||||
]
|
||||
|
||||
return project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) << {
|
||||
File configFile = new File(home, 'config/elasticsearch.yml')
|
||||
logger.info("Configuring ${configFile}")
|
||||
configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8')
|
||||
}
|
||||
}
|
||||
|
||||
/** Adds a task to copy plugins to a temp dir, which they will later be installed from. */
|
||||
static Task configureCopyPluginsTask(String name, Project project, Task setup, File pluginsTmpDir, ClusterConfiguration config) {
|
||||
if (config.plugins.isEmpty()) {
|
||||
return setup
|
||||
}
|
||||
// collect the files for plugins into a list, but wrap each in a closure to delay
|
||||
// looking for the filename until execution time
|
||||
List files = config.plugins.values().collect { plugin -> return { plugin.singleFile } }
|
||||
return project.tasks.create(name: name, type: Copy, dependsOn: setup) {
|
||||
into pluginsTmpDir
|
||||
from(*files) // spread the list into varargs
|
||||
}
|
||||
}
|
||||
|
||||
/** Adds a task to execute a command to help setup the cluster */
|
||||
static Task configureExecTask(String name, Project project, Task setup, File cwd, Object[] execArgs) {
|
||||
return project.tasks.create(name: name, type: Exec, dependsOn: setup) {
|
||||
workingDir cwd
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
executable 'cmd'
|
||||
args '/C', 'call'
|
||||
} else {
|
||||
executable 'sh'
|
||||
}
|
||||
args execArgs
|
||||
// only show output on failure, when not in info or debug mode
|
||||
if (logger.isInfoEnabled() == false) {
|
||||
standardOutput = new ByteArrayOutputStream()
|
||||
errorOutput = standardOutput
|
||||
ignoreExitValue = true
|
||||
doLast {
|
||||
if (execResult.exitValue != 0) {
|
||||
logger.error(standardOutput.toString())
|
||||
throw new GradleException("Process '${execArgs.join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void addNodeStartupTasks(Project project, Task task, ClusterConfiguration config, File baseDir) {
|
||||
String clusterName = "${task.path.replace(':', '_').substring(1)}"
|
||||
File home = new File(baseDir, "elasticsearch-${ElasticsearchProperties.version}")
|
||||
List setupDependsOn = [project.configurations.elasticsearchZip]
|
||||
setupDependsOn.addAll(task.dependsOn)
|
||||
Task setup = project.tasks.create(name: task.name + '#setup', type: Copy, dependsOn: setupDependsOn) {
|
||||
from { project.zipTree(project.configurations.elasticsearchZip.singleFile) }
|
||||
into baseDir
|
||||
}
|
||||
// chain setup tasks to maintain their order
|
||||
setup = project.tasks.create(name: "${task.name}#clean", type: Delete, dependsOn: setup) {
|
||||
delete new File(home, 'plugins'), new File(home, 'data'), new File(home, 'logs')
|
||||
}
|
||||
setup = project.tasks.create(name: "${task.name}#configure", type: DefaultTask, dependsOn: setup) << {
|
||||
File configFile = new File(home, 'config' + File.separator + 'elasticsearch.yml')
|
||||
logger.info("Configuring ${configFile}")
|
||||
configFile.setText("cluster.name: ${clusterName}", 'UTF-8')
|
||||
}
|
||||
for (Map.Entry<String, String> command : config.setupCommands.entrySet()) {
|
||||
Task nextSetup = project.tasks.create(name: "${task.name}#${command.getKey()}", type: Exec, dependsOn: setup) {
|
||||
workingDir home
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
executable 'cmd'
|
||||
args '/C', 'call'
|
||||
} else {
|
||||
executable 'sh'
|
||||
}
|
||||
args command.getValue()
|
||||
// only show output on failure, when not in info or debug mode
|
||||
if (logger.isInfoEnabled() == false) {
|
||||
standardOutput = new ByteArrayOutputStream()
|
||||
errorOutput = standardOutput
|
||||
ignoreExitValue = true
|
||||
doLast {
|
||||
if (execResult.exitValue != 0) {
|
||||
logger.error(standardOutput.toString())
|
||||
throw new GradleException("Process '${command.getValue().join(' ')}' finished with non-zero exit value ${execResult.exitValue}")
|
||||
}
|
||||
}
|
||||
}
|
||||
/** Adds a task to start an elasticsearch node with the given configuration */
|
||||
static Task configureStartTask(String name, Project project, Task setup, File cwd, ClusterConfiguration config, String clusterName, File pidFile, File home) {
|
||||
Map esEnv = [
|
||||
'JAVA_HOME' : System.getProperty('java.home'),
|
||||
'ES_GC_OPTS': config.jvmArgs
|
||||
]
|
||||
List esProps = config.systemProperties.collect { key, value -> "-D${key}=${value}" }
|
||||
for (Map.Entry<String, String> property : System.properties.entrySet()) {
|
||||
if (property.getKey().startsWith('es.')) {
|
||||
esProps.add("-D${property.getKey()}=${property.getValue()}")
|
||||
}
|
||||
setup = nextSetup
|
||||
}
|
||||
|
||||
File pidFile = pidFile(baseDir)
|
||||
List esArgs = [
|
||||
"-Des.http.port=${config.httpPort}",
|
||||
"-Des.transport.tcp.port=${config.transportPort}",
|
||||
"-Des.pidfile=${pidFile}",
|
||||
"-Des.path.repo=${home}/repo",
|
||||
"-Des.path.shared_data=${home}/../",
|
||||
]
|
||||
esArgs.addAll(config.systemProperties.collect {key, value -> "-D${key}=${value}"})
|
||||
Closure esPostStartActions = { ant, logger ->
|
||||
ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${task.name}#start") {
|
||||
ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name.capitalize()}") {
|
||||
and {
|
||||
resourceexists {
|
||||
file file: pidFile.toString()
|
||||
|
@ -118,35 +230,40 @@ class ClusterFormationTasks {
|
|||
http(url: "http://localhost:${config.httpPort}")
|
||||
}
|
||||
}
|
||||
if (ant.properties.containsKey("failed${task.name}#start".toString())) {
|
||||
new File(home, 'logs' + File.separator + clusterName + '.log').eachLine {
|
||||
line -> logger.error(line)
|
||||
if (ant.properties.containsKey("failed${name}".toString())) {
|
||||
File logFile = new File(home, "logs/${clusterName}.log")
|
||||
if (logFile.exists()) {
|
||||
logFile.eachLine { line -> logger.error(line) }
|
||||
}
|
||||
throw new GradleException('Failed to start elasticsearch')
|
||||
}
|
||||
}
|
||||
Task start;
|
||||
File esScript = new File(home, 'bin/elasticsearch')
|
||||
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
// elasticsearch.bat is spawned as it has no daemon mode
|
||||
start = project.tasks.create(name: "${task.name}#start", type: DefaultTask, dependsOn: setup) << {
|
||||
return project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) << {
|
||||
// Fall back to Ant exec task as Gradle Exec task does not support spawning yet
|
||||
ant.exec(executable: 'cmd', spawn: true, dir: home) {
|
||||
(['/C', 'call', 'bin/elasticsearch'] + esArgs).each { arg(value: it) }
|
||||
ant.exec(executable: 'cmd', spawn: true, dir: cwd) {
|
||||
esEnv.each { key, value -> env(key: key, value: value) }
|
||||
(['/C', 'call', esScript] + esProps).each { arg(value: it) }
|
||||
}
|
||||
esPostStartActions(ant, logger)
|
||||
}
|
||||
} else {
|
||||
start = project.tasks.create(name: "${task.name}#start", type: Exec, dependsOn: setup) {
|
||||
workingDir home
|
||||
return project.tasks.create(name: name, type: Exec, dependsOn: setup) {
|
||||
workingDir cwd
|
||||
executable 'sh'
|
||||
args 'bin/elasticsearch', '-d' // daemonize!
|
||||
args esArgs
|
||||
args esScript, '-d' // daemonize!
|
||||
args esProps
|
||||
environment esEnv
|
||||
errorOutput = new ByteArrayOutputStream()
|
||||
doLast {
|
||||
if (errorOutput.toString().isEmpty() == false) {
|
||||
logger.error(errorOutput.toString())
|
||||
new File(home, 'logs' + File.separator + clusterName + '.log').eachLine {
|
||||
line -> logger.error(line)
|
||||
File logFile = new File(home, "logs/${clusterName}.log")
|
||||
if (logFile.exists()) {
|
||||
logFile.eachLine { line -> logger.error(line) }
|
||||
}
|
||||
throw new GradleException('Failed to start elasticsearch')
|
||||
}
|
||||
|
@ -154,46 +271,69 @@ class ClusterFormationTasks {
|
|||
}
|
||||
}
|
||||
}
|
||||
task.dependsOn(start)
|
||||
}
|
||||
|
||||
static void addNodeStopTask(Project project, Task task, File baseDir) {
|
||||
LazyPidReader pidFile = new LazyPidReader(pidFile: pidFile(baseDir))
|
||||
Task stop = project.tasks.create(name: task.name + '#stop', type: Exec) {
|
||||
/** Adds a task to check if the process with the given pidfile is actually elasticsearch */
|
||||
static Task configureCheckPreviousTask(String name, Project project, Object depends, File pidFile) {
|
||||
return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
|
||||
onlyIf { pidFile.exists() }
|
||||
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
|
||||
ext.pid = "${ -> pidFile.getText('UTF-8').trim()}"
|
||||
commandLine new File(System.getenv('JAVA_HOME'), 'bin/jps'), '-l'
|
||||
standardOutput = new ByteArrayOutputStream()
|
||||
doLast {
|
||||
String out = standardOutput.toString()
|
||||
if (out.contains("${pid} org.elasticsearch.bootstrap.Elasticsearch") == false) {
|
||||
logger.error('jps -l')
|
||||
logger.error(out)
|
||||
logger.error("pid file: ${pidFile}")
|
||||
logger.error("pid: ${pid}")
|
||||
throw new GradleException("jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch\n" +
|
||||
"Did you run gradle clean? Maybe an old pid file is still lying around.")
|
||||
} else {
|
||||
logger.info(out)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Adds a task to kill an elasticsearch node with the given pidfile */
|
||||
static Task configureStopTask(String name, Project project, Object depends, File pidFile) {
|
||||
return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
|
||||
onlyIf { pidFile.exists() }
|
||||
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
|
||||
ext.pid = "${ -> pidFile.getText('UTF-8').trim()}"
|
||||
doFirst {
|
||||
logger.info("Shutting down external node with pid ${pid}")
|
||||
}
|
||||
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
|
||||
executable 'Taskkill'
|
||||
args '/PID', pidFile, '/F'
|
||||
args '/PID', pid, '/F'
|
||||
} else {
|
||||
executable 'kill'
|
||||
args '-9', pidFile
|
||||
args '-9', pid
|
||||
}
|
||||
doLast {
|
||||
// TODO: wait for pid to close, or kill -9 and fail
|
||||
project.delete(pidFile)
|
||||
}
|
||||
}
|
||||
task.finalizedBy(stop)
|
||||
}
|
||||
|
||||
/** Delays reading a pid file until needing to use the pid */
|
||||
static class LazyPidReader {
|
||||
File pidFile
|
||||
@Override
|
||||
String toString() {
|
||||
return pidFile.text.stripMargin()
|
||||
/** Returns the directory elasticsearch home is contained in for the given distribution */
|
||||
static File homeDir(File baseDir, String distro) {
|
||||
String path
|
||||
switch (distro) {
|
||||
case 'zip':
|
||||
case 'tar':
|
||||
path = "elasticsearch-${ElasticsearchProperties.version}"
|
||||
break;
|
||||
default:
|
||||
throw new InvalidUserDataException("Unknown distribution: ${distro}")
|
||||
}
|
||||
return new File(baseDir, path)
|
||||
}
|
||||
|
||||
static File pidFile(File dir) {
|
||||
return new File(dir, 'es.pid')
|
||||
}
|
||||
|
||||
static void addZipConfiguration(Project project) {
|
||||
String elasticsearchVersion = ElasticsearchProperties.version
|
||||
project.configurations {
|
||||
elasticsearchZip
|
||||
}
|
||||
project.dependencies {
|
||||
elasticsearchZip "org.elasticsearch.distribution.zip:elasticsearch:${elasticsearchVersion}@zip"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
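Several of the tasks above delay reading the pid file by wrapping the read in a closure inside a GString. A tiny, self-contained Groovy sketch of that trick (the file path below is made up):

---------------------------------------
// Lazy GString: the closure runs only when the string is rendered, so the
// pid file has to exist at execution time, not at configuration time.
File pidFile = new File(System.getProperty('java.io.tmpdir'), 'example-es.pid')
def pid = "${ -> pidFile.getText('UTF-8').trim() }"

pidFile.text = '12345\n'        // written "later", e.g. once the node is up
println "Shutting down external node with pid ${pid}"   // the file is read only here
---------------------------------------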
@@ -19,7 +19,6 @@
package org.elasticsearch.gradle.test

import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.ElasticsearchProperties
import org.gradle.api.Plugin
import org.gradle.api.Project
@@ -28,19 +27,7 @@ class RestTestPlugin implements Plugin<Project> {

  @Override
  void apply(Project project) {
    project.pluginManager.apply('java-base')
    project.pluginManager.apply('carrotsearch.randomized-testing')

    // remove some unnecessary tasks for a qa test
    project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] }

    // only setup tests to build
    project.sourceSets {
      test
    }
    project.dependencies {
      testCompile "org.elasticsearch:test-framework:${ElasticsearchProperties.version}"
    }
    project.pluginManager.apply(StandaloneTestBasePlugin)

    RandomizedTestingTask integTest = RestIntegTestTask.configure(project)
    RestSpecHack.configureDependencies(project)
@@ -48,12 +35,5 @@ class RestTestPlugin implements Plugin<Project> {
      classpath = project.sourceSets.test.runtimeClasspath
      testClassesDir project.sourceSets.test.output.classesDir
    }

    project.eclipse {
      classpath {
        sourceSets = [project.sourceSets.test]
        plusConfigurations = [project.configurations.testRuntime]
      }
    }
  }
}
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
package org.elasticsearch.gradle.test
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
|
||||
import org.elasticsearch.gradle.ElasticsearchProperties
|
||||
import org.gradle.api.Plugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
|
||||
/** Configures the build to have a rest integration test. */
|
||||
class StandaloneTestBasePlugin implements Plugin<Project> {
|
||||
|
||||
@Override
|
||||
void apply(Project project) {
|
||||
project.pluginManager.apply(JavaBasePlugin)
|
||||
project.pluginManager.apply(RandomizedTestingPlugin)
|
||||
|
||||
// remove some unnecessary tasks for a qa test
|
||||
project.tasks.removeAll { it.name in ['assemble', 'buildDependents'] }
|
||||
|
||||
// only setup tests to build
|
||||
project.sourceSets {
|
||||
test
|
||||
}
|
||||
project.dependencies {
|
||||
testCompile "org.elasticsearch:test-framework:${ElasticsearchProperties.version}"
|
||||
}
|
||||
|
||||
project.eclipse {
|
||||
classpath {
|
||||
sourceSets = [project.sourceSets.test]
|
||||
plusConfigurations = [project.configurations.testRuntime]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.gradle.test
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
||||
import org.elasticsearch.gradle.BuildPlugin
|
||||
import org.elasticsearch.gradle.ElasticsearchProperties
|
||||
import org.gradle.api.Plugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
|
||||
/** Configures the build to have only unit tests. */
|
||||
class StandaloneTestPlugin implements Plugin<Project> {
|
||||
|
||||
@Override
|
||||
void apply(Project project) {
|
||||
project.pluginManager.apply(StandaloneTestBasePlugin)
|
||||
|
||||
Map testOptions = [
|
||||
name: 'test',
|
||||
type: RandomizedTestingTask,
|
||||
dependsOn: 'testClasses',
|
||||
group: JavaBasePlugin.VERIFICATION_GROUP,
|
||||
description: 'Runs unit tests that are separate'
|
||||
]
|
||||
RandomizedTestingTask test = project.tasks.create(testOptions)
|
||||
test.configure(BuildPlugin.commonTestConfig(project))
|
||||
test.configure {
|
||||
classpath = project.sourceSets.test.runtimeClasspath
|
||||
testClassesDir project.sourceSets.test.output.classesDir
|
||||
}
|
||||
project.check.dependsOn(test)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.vagrant
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.logging.ProgressLogger
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
import org.gradle.process.internal.ExecAction
|
||||
import org.gradle.process.internal.ExecActionFactory
|
||||
|
||||
import javax.inject.Inject
|
||||
|
||||
/**
|
||||
* Runs bats over vagrant. Pretty much like running it using Exec but with a
|
||||
* nicer output formatter.
|
||||
*/
|
||||
class BatsOverVagrantTask extends DefaultTask {
|
||||
String command
|
||||
String boxName
|
||||
ExecAction execAction
|
||||
|
||||
BatsOverVagrantTask() {
|
||||
execAction = getExecActionFactory().newExecAction()
|
||||
}
|
||||
|
||||
@Inject
|
||||
ProgressLoggerFactory getProgressLoggerFactory() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Inject
|
||||
ExecActionFactory getExecActionFactory() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
void boxName(String boxName) {
|
||||
this.boxName = boxName
|
||||
}
|
||||
|
||||
void command(String command) {
|
||||
this.command = command
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
void exec() {
|
||||
// It'd be nice if --machine-readable were, well, nice
|
||||
execAction.commandLine(['vagrant', 'ssh', boxName, '--command', command])
|
||||
execAction.setStandardOutput(new TapLoggerOutputStream(
|
||||
command: command,
|
||||
factory: getProgressLoggerFactory(),
|
||||
logger: logger))
|
||||
execAction.execute();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.vagrant
|
||||
|
||||
import com.carrotsearch.gradle.junit4.LoggingOutputStream
|
||||
import org.gradle.api.GradleScriptException
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.logging.Logger
|
||||
import org.gradle.logging.ProgressLogger
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
import java.util.regex.Matcher
|
||||
|
||||
/**
|
||||
* Adapts an OutputStream containing output from bats into a ProgressLogger
|
||||
* and a Logger. Every test output goes to the ProgressLogger and all failures
|
||||
* and non-test output goes to the Logger. That means you can always glance
|
||||
* at the result of the last test and the cumulative pass/fail/skip stats and
|
||||
* the failures are all logged.
|
||||
*
|
||||
* There is a Tap4j project but we can't use it because it wants to parse the
|
||||
* entire TAP stream at once and won't parse it stream-wise.
|
||||
*/
|
||||
class TapLoggerOutputStream extends LoggingOutputStream {
|
||||
ProgressLogger progressLogger
|
||||
Logger logger
|
||||
int testsCompleted = 0
|
||||
int testsFailed = 0
|
||||
int testsSkipped = 0
|
||||
Integer testCount
|
||||
String countsFormat
|
||||
|
||||
TapLoggerOutputStream(Map args) {
|
||||
logger = args.logger
|
||||
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
|
||||
progressLogger.setDescription("TAP output for $args.command")
|
||||
progressLogger.started()
|
||||
progressLogger.progress("Starting $args.command...")
|
||||
}
|
||||
|
||||
void flush() {
|
||||
if (end == start) return
|
||||
line(new String(buffer, start, end - start))
|
||||
start = end
|
||||
}
|
||||
|
||||
void line(String line) {
|
||||
// System.out.print "===> $line\n"
|
||||
if (testCount == null) {
|
||||
try {
|
||||
testCount = line.split('\\.').last().toInteger()
|
||||
def length = (testCount as String).length()
|
||||
countsFormat = "%0${length}d"
|
||||
countsFormat = "[$countsFormat|$countsFormat|$countsFormat/$countsFormat]"
|
||||
return
|
||||
} catch (Exception e) {
|
||||
throw new GradleScriptException(
|
||||
'Error parsing first line of TAP stream!!', e)
|
||||
}
|
||||
}
|
||||
Matcher m = line =~ /(?<status>ok|not ok) \d+(?<skip> # skip (?<skipReason>\(.+\))?)? \[(?<suite>.+)\] (?<test>.+)/
|
||||
if (!m.matches()) {
|
||||
/* These might be failure report lines or comments or whatever. It's hard
|
||||
to tell and it doesn't matter. */
|
||||
logger.warn(line)
|
||||
return
|
||||
}
|
||||
boolean skipped = m.group('skip') != null
|
||||
boolean success = !skipped && m.group('status') == 'ok'
|
||||
String skipReason = m.group('skipReason')
|
||||
String suiteName = m.group('suite')
|
||||
String testName = m.group('test')
|
||||
|
||||
String status
|
||||
if (skipped) {
|
||||
status = "SKIPPED"
|
||||
testsSkipped++
|
||||
} else if (success) {
|
||||
status = " OK"
|
||||
testsCompleted++
|
||||
} else {
|
||||
status = " FAILED"
|
||||
testsFailed++
|
||||
}
|
||||
|
||||
String counts = sprintf(countsFormat,
|
||||
[testsCompleted, testsFailed, testsSkipped, testCount])
|
||||
progressLogger.progress("Tests $counts, $status [$suiteName] $testName")
|
||||
if (!success) {
|
||||
logger.warn(line)
|
||||
}
|
||||
}
|
||||
}
|
|
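For context, a short sketch of the TAP stream this class consumes; the suite and test names below are invented, but the shapes match the count line and the regular expression handled in line() above.

-------------------------------------
// Invented sample of a bats TAP stream as seen by TapLoggerOutputStream.
def sample = [
  '1..3',                                                          // first line: total test count
  'ok 1 [packaging] install works',                                // counted as OK
  'not ok 2 [packaging] service starts',                           // counted as FAILED and logged
  'ok 3 # skip (needs systemd) [packaging] unit file installed'    // counted as SKIPPED
]
// Each later line is matched against:
//   (?<status>ok|not ok) \d+(?<skip> # skip (?<skipReason>\(.+\))?)? \[(?<suite>.+)\] (?<test>.+)
-------------------------------------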
@@ -0,0 +1,73 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.vagrant

import org.gradle.api.DefaultTask
import org.gradle.api.tasks.*
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.gradle.process.internal.ExecAction
import org.gradle.process.internal.ExecActionFactory

import javax.inject.Inject

/**
 * Runs a vagrant command. Pretty much like Exec task but with a nicer output
 * formatter and defaults to `vagrant` as first part of commandLine.
 */
class VagrantCommandTask extends DefaultTask {
  List<Object> commandLine
  String boxName
  ExecAction execAction

  VagrantCommandTask() {
    execAction = getExecActionFactory().newExecAction()
  }

  @Inject
  ProgressLoggerFactory getProgressLoggerFactory() {
    throw new UnsupportedOperationException();
  }

  @Inject
  ExecActionFactory getExecActionFactory() {
    throw new UnsupportedOperationException();
  }

  void boxName(String boxName) {
    this.boxName = boxName
  }

  void commandLine(Object... commandLine) {
    this.commandLine = commandLine
  }

  @TaskAction
  void exec() {
    // It'd be nice if --machine-readable were, well, nice
    execAction.commandLine(['vagrant'] + commandLine)
    execAction.setStandardOutput(new VagrantLoggerOutputStream(
      command: commandLine.join(' '),
      factory: getProgressLoggerFactory(),
      /* Vagrant tends to output a lot of stuff, but most of the important
         stuff starts with ==> $box */
      squashedPrefix: "==> $boxName: "))
    execAction.execute();
  }
}
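A hedged sketch of how such a task might be declared from a build script; the task name, box name, and vagrant arguments below are examples, not something added by this change.

-------------------------------------
// Example wiring only: 'smokeTestCentos7' and its arguments are invented.
task smokeTestCentos7(type: VagrantCommandTask) {
  boxName 'centos-7'
  commandLine 'up', 'centos-7', '--provision', '--provider', 'virtualbox'
}
-------------------------------------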
@@ -0,0 +1,121 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.vagrant

import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory

/**
 * Adapts an OutputStream being written to by vagrant into a ProgressLogger. It
 * has three hacks to make the output nice:
 *
 * 1. Attempt to filter out the "unimportant" output from vagrant. Usually
 * vagrant prefixes its more important output with "==> $boxname: ". The stuff
 * that isn't prefixed that way can just be thrown out.
 *
 * 2. It also attempts to detect when vagrant does tricks assuming it's writing
 * to a terminal emulator and renders the output more like gradle users expect.
 * This means that progress indicators for things like box downloading and
 * box importing look pretty good.
 *
 * 3. It catches lines that look like "==> $boxName ==> Heading text" and stores
 * the text after the second arrow as a "heading" for use in annotating
 * provisioning. It does this because provisioning can spit out _lots_ of text
 * and it's very easy to lose context when there isn't a scrollback. So we've
 * sprinkled `echo "==> Heading text"` into the provisioning scripts for this
 * to catch so it can render the output like
 * "Heading text > stdout from the provisioner".
 */
class VagrantLoggerOutputStream extends LoggingOutputStream {
  static final String HEADING_PREFIX = '==> '

  ProgressLogger progressLogger
  String squashedPrefix
  String lastLine = ''
  boolean inProgressReport = false
  String heading = ''

  VagrantLoggerOutputStream(Map args) {
    progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
    progressLogger.setDescription("Vagrant $args.command")
    progressLogger.started()
    progressLogger.progress("Starting vagrant $args.command...")
    squashedPrefix = args.squashedPrefix
  }

  void flush() {
    if (end == start) return
    line(new String(buffer, start, end - start))
    start = end
  }

  void line(String line) {
    // debugPrintLine(line) // Uncomment me to log every incoming line
    if (line.startsWith('\r\u001b')) {
      /* We don't want to try to be a full terminal emulator but we want to
         keep the escape sequences from leaking and catch _some_ of the
         meaning. */
      line = line.substring(2)
      if ('[K' == line) {
        inProgressReport = true
      }
      return
    }
    if (line.startsWith(squashedPrefix)) {
      line = line.substring(squashedPrefix.length())
      inProgressReport = false
      lastLine = line
      if (line.startsWith(HEADING_PREFIX)) {
        line = line.substring(HEADING_PREFIX.length())
        heading = line + ' > '
      } else {
        line = heading + line
      }
    } else if (inProgressReport) {
      inProgressReport = false
      line = lastLine + line
    } else {
      return
    }
    // debugLogLine(line) // Uncomment me to log every line we add to the logger
    progressLogger.progress(line)
  }

  void debugPrintLine(line) {
    System.out.print '----------> '
    for (int i = start; i < end; i++) {
      switch (buffer[i] as char) {
        case ' '..'~':
          System.out.print buffer[i] as char
          break
        default:
          System.out.print '%'
          System.out.print Integer.toHexString(buffer[i])
      }
    }
    System.out.print '\n'
  }

  void debugLogLine(line) {
    System.out.print '>>>>>>>>>>> '
    System.out.print line
    System.out.print '\n'
  }
}
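To make the squashing rules concrete, a small sketch of how sample vagrant output (invented here) would be handled; the box name and the injected factory are assumptions, not part of this change.

-------------------------------------
// Assuming squashedPrefix is "==> centos-7: " (example box name):
//   "==> centos-7: ==> Installing elasticsearch"  -> sets the heading to "Installing elasticsearch > "
//   "==> centos-7: Downloading tarball"           -> shown as "Installing elasticsearch > Downloading tarball"
//   "    default: SSH username: vagrant"          -> no prefix, silently dropped
def stream = new VagrantLoggerOutputStream(
    command: 'up centos-7',
    factory: progressLoggerFactory,   // assumed to be Gradle's injected ProgressLoggerFactory
    squashedPrefix: '==> centos-7: ')
-------------------------------------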
@@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

implementation-class=org.elasticsearch.gradle.test.StandaloneTestPlugin
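For orientation, a minimal sketch of consuming the plugin registered above; it is applied by implementation class here because only that class name is visible in this hunk.

-------------------------------------
// Sketch: apply the standalone test plugin directly by its implementation class.
apply plugin: org.elasticsearch.gradle.test.StandaloneTestPlugin
-------------------------------------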
File diff suppressed because one or more lines are too long

@@ -100,3 +100,8 @@ org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()

@defaultMessage this method needs special permission
java.lang.Thread#getAllStackTraces()

@defaultMessage Please do not terminate the application
java.lang.System#exit(int)
java.lang.Runtime#exit(int)
java.lang.Runtime#halt(int)
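As an illustration (not part of this change), the kind of call the new forbidden-API signatures are meant to reject at build time; the class below is invented.

-------------------------------------
// Hypothetical example: both calls below would be flagged by the signatures above.
class BadCitizen {
  static void dumpAndDie() {
    Thread.getAllStackTraces()   // "this method needs special permission"
    System.exit(1)               // "Please do not terminate the application"
  }
}
-------------------------------------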
@@ -31,19 +31,19 @@
 ### mandatory elements for all plugins:
 #
 # 'description': simple summary of the plugin
-description=${project.description}
+description=${description}
 #
 # 'version': plugin's version
-version=${project.version}
+version=${version}
 #
 # 'name': the plugin name
-name=${elasticsearch.plugin.name}
+name=${name}

 ### mandatory elements for site plugins:
 #
 # 'site': set to true to indicate contents of the _site/
 #  directory in the root of the plugin should be served.
-site=${elasticsearch.plugin.site}
+site=${site}
 #
 ### mandatory elements for jvm plugins :
 #

@@ -52,19 +52,19 @@ site=${elasticsearch.plugin.site}
 # Note that only jar files in the root directory are
 # added to the classpath for the plugin! If you need
 # other resources, package them into a resources jar.
-jvm=${elasticsearch.plugin.jvm}
+jvm=${jvm}
 #
 # 'classname': the name of the class to load, fully-qualified.
-classname=${elasticsearch.plugin.classname}
+classname=${classname}
 #
 # 'java.version' version of java the code is built against
 # use the system property java.specification.version
 # version string must be a sequence of nonnegative decimal integers
 # separated by "."'s and may have leading zeros
-java.version=${java.target.version}
+java.version=${javaVersion}
 #
 # 'elasticsearch.version' version of elasticsearch compiled against
-elasticsearch.version=${elasticsearch.version}
+elasticsearch.version=${elasticsearchVersion}
 #
 ### deprecated elements for jvm plugins :
 #

@@ -72,5 +72,5 @@ elasticsearch.version=${elasticsearch.version}
 # passing false is deprecated, and only intended to support plugins
 # that have hard dependencies against each other. If this is
 # not specified, then the plugin is isolated by default.
-isolated=${elasticsearch.plugin.isolated}
+isolated=${isolated}
 #
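The renamed placeholders line up with plain Gradle property expansion; a hedged sketch follows, in which the task wiring and the property values are illustrative rather than taken from this change.

-------------------------------------
// Sketch: how a Gradle copy spec could fill the renamed placeholders.
processResources {
  expand(
      name: 'example-plugin',                           // invented values for illustration
      description: 'demonstrates descriptor expansion',
      version: project.version,
      classname: 'org.example.ExamplePlugin',
      jvm: true,
      site: false,
      isolated: true,
      javaVersion: '1.8',
      elasticsearchVersion: project.version)
}
-------------------------------------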
@@ -89,9 +89,24 @@ dependencies {

  compile 'net.java.dev.jna:jna:4.1.0', optional

  testCompile("org.elasticsearch:test-framework:${version}") {
    // tests use the locally compiled version of core
    exclude group: 'org.elasticsearch', module: 'elasticsearch'
  if (isEclipse == false || project.path == "${projectsPrefix}:core-tests") {
    testCompile("org.elasticsearch:test-framework:${version}") {
      // tests use the locally compiled version of core
      exclude group: 'org.elasticsearch', module: 'elasticsearch'
    }
  }
}

if (isEclipse) {
  // in eclipse the project is under a fake root, we need to change around the source sets
  sourceSets {
    if (project.path == "${projectsPrefix}:core") {
      main.java.srcDirs = ['java']
      main.resources.srcDirs = ['resources']
    } else {
      test.java.srcDirs = ['java']
      test.resources.srcDirs = ['resources']
    }
  }
}

@@ -104,19 +119,21 @@ forbiddenPatterns {
  exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt'
}

task integTest(type: RandomizedTestingTask,
               group: JavaBasePlugin.VERIFICATION_GROUP,
               description: 'Multi-node tests',
               dependsOn: test.dependsOn) {
  configure(BuildPlugin.commonTestConfig(project))
  classpath = project.test.classpath
  testClassesDir = project.test.testClassesDir
  include '**/*IT.class'
if (isEclipse == false || project.path == "${projectsPrefix}:core-tests") {
  task integTest(type: RandomizedTestingTask,
                 group: JavaBasePlugin.VERIFICATION_GROUP,
                 description: 'Multi-node tests',
                 dependsOn: test.dependsOn) {
    configure(BuildPlugin.commonTestConfig(project))
    classpath = project.test.classpath
    testClassesDir = project.test.testClassesDir
    include '**/*IT.class'
  }
  check.dependsOn integTest
  integTest.mustRunAfter test

  RestSpecHack.configureDependencies(project)
  Task copyRestSpec = RestSpecHack.configureTask(project, true)
  integTest.dependsOn copyRestSpec
}
check.dependsOn integTest
integTest.mustRunAfter test

RestSpecHack.configureDependencies(project)
Task copyRestSpec = RestSpecHack.configureTask(project, true)
integTest.dependsOn copyRestSpec


@@ -0,0 +1,3 @@

// this is just shell gradle file for eclipse to have separate projects for core src and tests
apply from: '../../build.gradle'
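The check.dependsOn integTest plus integTest.mustRunAfter test pairing above is a standard Gradle pattern; a self-contained sketch with generic task names (not from this build):

-------------------------------------
// Generic illustration: checkAll runs both, and integ is only *ordered* after unit,
// so running `gradle integ` alone does not drag the unit tests in.
task unit { doLast { println 'unit tests' } }
task integ { doLast { println 'integration tests' } }
task checkAll { dependsOn unit, integ }
integ.mustRunAfter unit
-------------------------------------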
File diff suppressed because it is too large
@ -1,258 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.apache.lucene.search.suggest.analyzing;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStreamToAutomaton;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.IntsRef;
|
||||
import org.apache.lucene.util.UnicodeUtil;
|
||||
import org.apache.lucene.util.automaton.*;
|
||||
import org.apache.lucene.util.fst.FST;
|
||||
import org.apache.lucene.util.fst.PairOutputs;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
|
||||
|
||||
/**
|
||||
* Implements a fuzzy {@link AnalyzingSuggester}. The similarity measurement is
|
||||
* based on the Damerau-Levenshtein (optimal string alignment) algorithm, though
|
||||
* you can explicitly choose classic Levenshtein by passing <code>false</code>
|
||||
* for the <code>transpositions</code> parameter.
|
||||
* <p>
|
||||
* At most, this query will match terms up to
|
||||
* {@value org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE}
|
||||
* edits. Higher distances are not supported. Note that the
|
||||
* fuzzy distance is measured in "byte space" on the bytes
|
||||
* returned by the {@link org.apache.lucene.analysis.TokenStream}'s {@link
|
||||
* org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute}, usually UTF8. By default
|
||||
* the analyzed bytes must be at least 3 {@link
|
||||
* #DEFAULT_MIN_FUZZY_LENGTH} bytes before any edits are
|
||||
* considered. Furthermore, the first 1 {@link
|
||||
* #DEFAULT_NON_FUZZY_PREFIX} byte is not allowed to be
|
||||
* edited. We allow up to 1 (@link
|
||||
* #DEFAULT_MAX_EDITS} edit.
|
||||
* If {@link #unicodeAware} parameter in the constructor is set to true, maxEdits,
|
||||
* minFuzzyLength, transpositions and nonFuzzyPrefix are measured in Unicode code
|
||||
* points (actual letters) instead of bytes.*
|
||||
*
|
||||
* <p>
|
||||
* NOTE: This suggester does not boost suggestions that
|
||||
* required no edits over suggestions that did require
|
||||
* edits. This is a known limitation.
|
||||
*
|
||||
* <p>
|
||||
* Note: complex query analyzers can have a significant impact on the lookup
|
||||
* performance. It's recommended to not use analyzers that drop or inject terms
|
||||
* like synonyms to keep the complexity of the prefix intersection low for good
|
||||
* lookup performance. At index time, complex analyzers can safely be used.
|
||||
* </p>
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public final class XFuzzySuggester extends XAnalyzingSuggester {
|
||||
private final int maxEdits;
|
||||
private final boolean transpositions;
|
||||
private final int nonFuzzyPrefix;
|
||||
private final int minFuzzyLength;
|
||||
private final boolean unicodeAware;
|
||||
|
||||
/**
|
||||
* Measure maxEdits, minFuzzyLength, transpositions and nonFuzzyPrefix
|
||||
* parameters in Unicode code points (actual letters)
|
||||
* instead of bytes.
|
||||
*/
|
||||
public static final boolean DEFAULT_UNICODE_AWARE = false;
|
||||
|
||||
/**
|
||||
* The default minimum length of the key passed to {@link
|
||||
* #lookup} before any edits are allowed.
|
||||
*/
|
||||
public static final int DEFAULT_MIN_FUZZY_LENGTH = 3;
|
||||
|
||||
/**
|
||||
* The default prefix length where edits are not allowed.
|
||||
*/
|
||||
public static final int DEFAULT_NON_FUZZY_PREFIX = 1;
|
||||
|
||||
/**
|
||||
* The default maximum number of edits for fuzzy
|
||||
* suggestions.
|
||||
*/
|
||||
public static final int DEFAULT_MAX_EDITS = 1;
|
||||
|
||||
/**
|
||||
* The default transposition value passed to {@link org.apache.lucene.util.automaton.LevenshteinAutomata}
|
||||
*/
|
||||
public static final boolean DEFAULT_TRANSPOSITIONS = true;
|
||||
|
||||
/**
|
||||
* Creates a {@link FuzzySuggester} instance initialized with default values.
|
||||
*
|
||||
* @param analyzer the analyzer used for this suggester
|
||||
*/
|
||||
public XFuzzySuggester(Analyzer analyzer) {
|
||||
this(analyzer, analyzer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link FuzzySuggester} instance with an index & a query analyzer initialized with default values.
|
||||
*
|
||||
* @param indexAnalyzer
|
||||
* Analyzer that will be used for analyzing suggestions while building the index.
|
||||
* @param queryAnalyzer
|
||||
* Analyzer that will be used for analyzing query text during lookup
|
||||
*/
|
||||
public XFuzzySuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer) {
|
||||
this(indexAnalyzer, null, queryAnalyzer, EXACT_FIRST | PRESERVE_SEP, 256, -1, DEFAULT_MAX_EDITS, DEFAULT_TRANSPOSITIONS,
|
||||
DEFAULT_NON_FUZZY_PREFIX, DEFAULT_MIN_FUZZY_LENGTH, DEFAULT_UNICODE_AWARE, null, false, 0, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link FuzzySuggester} instance.
|
||||
*
|
||||
* @param indexAnalyzer Analyzer that will be used for
|
||||
* analyzing suggestions while building the index.
|
||||
* @param queryAnalyzer Analyzer that will be used for
|
||||
* analyzing query text during lookup
|
||||
* @param options see {@link #EXACT_FIRST}, {@link #PRESERVE_SEP}
|
||||
* @param maxSurfaceFormsPerAnalyzedForm Maximum number of
|
||||
* surface forms to keep for a single analyzed form.
|
||||
* When there are too many surface forms we discard the
|
||||
* lowest weighted ones.
|
||||
* @param maxGraphExpansions Maximum number of graph paths
|
||||
* to expand from the analyzed form. Set this to -1 for
|
||||
* no limit.
|
||||
* @param maxEdits must be >= 0 and <= {@link org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE} .
|
||||
* @param transpositions <code>true</code> if transpositions should be treated as a primitive
|
||||
* edit operation. If this is false, comparisons will implement the classic
|
||||
* Levenshtein algorithm.
|
||||
* @param nonFuzzyPrefix length of common (non-fuzzy) prefix (see default {@link #DEFAULT_NON_FUZZY_PREFIX}
|
||||
* @param minFuzzyLength minimum length of lookup key before any edits are allowed (see default {@link #DEFAULT_MIN_FUZZY_LENGTH})
|
||||
* @param sepLabel separation label
|
||||
* @param payloadSep payload separator byte
|
||||
* @param endByte end byte marker byte
|
||||
*/
|
||||
public XFuzzySuggester(Analyzer indexAnalyzer, Automaton queryPrefix, Analyzer queryAnalyzer, int options, int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions,
|
||||
int maxEdits, boolean transpositions, int nonFuzzyPrefix, int minFuzzyLength, boolean unicodeAware,
|
||||
FST<PairOutputs.Pair<Long, BytesRef>> fst, boolean hasPayloads, int maxAnalyzedPathsForOneInput,
|
||||
int sepLabel, int payloadSep, int endByte, int holeCharacter) {
|
||||
super(indexAnalyzer, queryPrefix, queryAnalyzer, options, maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, true, fst, hasPayloads, maxAnalyzedPathsForOneInput, sepLabel, payloadSep, endByte, holeCharacter);
|
||||
if (maxEdits < 0 || maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
|
||||
throw new IllegalArgumentException("maxEdits must be between 0 and " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
|
||||
}
|
||||
if (nonFuzzyPrefix < 0) {
|
||||
throw new IllegalArgumentException("nonFuzzyPrefix must not be >= 0 (got " + nonFuzzyPrefix + ")");
|
||||
}
|
||||
if (minFuzzyLength < 0) {
|
||||
throw new IllegalArgumentException("minFuzzyLength must not be >= 0 (got " + minFuzzyLength + ")");
|
||||
}
|
||||
|
||||
this.maxEdits = maxEdits;
|
||||
this.transpositions = transpositions;
|
||||
this.nonFuzzyPrefix = nonFuzzyPrefix;
|
||||
this.minFuzzyLength = minFuzzyLength;
|
||||
this.unicodeAware = unicodeAware;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> getFullPrefixPaths(List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> prefixPaths,
|
||||
Automaton lookupAutomaton,
|
||||
FST<PairOutputs.Pair<Long,BytesRef>> fst)
|
||||
throws IOException {
|
||||
|
||||
// TODO: right now there's no penalty for fuzzy/edits,
|
||||
// ie a completion whose prefix matched exactly what the
|
||||
// user typed gets no boost over completions that
|
||||
// required an edit, which get no boost over completions
|
||||
// requiring two edits. I suspect a multiplicative
|
||||
// factor is appropriate (eg, say a fuzzy match must be at
|
||||
// least 2X better weight than the non-fuzzy match to
|
||||
// "compete") ... in which case I think the wFST needs
|
||||
// to be log weights or something ...
|
||||
|
||||
Automaton levA = convertAutomaton(toLevenshteinAutomata(lookupAutomaton));
|
||||
/*
|
||||
Writer w = new OutputStreamWriter(new FileOutputStream("out.dot"), "UTF-8");
|
||||
w.write(levA.toDot());
|
||||
w.close();
|
||||
System.out.println("Wrote LevA to out.dot");
|
||||
*/
|
||||
return FSTUtil.intersectPrefixPaths(levA, fst);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Automaton convertAutomaton(Automaton a) {
|
||||
if (unicodeAware) {
|
||||
// FLORIAN EDIT: get converted Automaton from superclass
|
||||
Automaton utf8automaton = new UTF32ToUTF8().convert(super.convertAutomaton(a));
|
||||
// This automaton should not blow up during determinize:
|
||||
utf8automaton = Operations.determinize(utf8automaton, Integer.MAX_VALUE);
|
||||
return utf8automaton;
|
||||
} else {
|
||||
return super.convertAutomaton(a);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenStreamToAutomaton getTokenStreamToAutomaton() {
|
||||
final TokenStreamToAutomaton tsta = super.getTokenStreamToAutomaton();
|
||||
tsta.setUnicodeArcs(unicodeAware);
|
||||
return tsta;
|
||||
}
|
||||
|
||||
Automaton toLevenshteinAutomata(Automaton automaton) {
|
||||
List<Automaton> subs = new ArrayList<>();
|
||||
FiniteStringsIterator finiteStrings = new FiniteStringsIterator(automaton);
|
||||
for (IntsRef string; (string = finiteStrings.next()) != null;) {
|
||||
if (string.length <= nonFuzzyPrefix || string.length < minFuzzyLength) {
|
||||
subs.add(Automata.makeString(string.ints, string.offset, string.length));
|
||||
} else {
|
||||
int ints[] = new int[string.length-nonFuzzyPrefix];
|
||||
System.arraycopy(string.ints, string.offset+nonFuzzyPrefix, ints, 0, ints.length);
|
||||
// TODO: maybe add alphaMin to LevenshteinAutomata,
|
||||
// and pass 1 instead of 0? We probably don't want
|
||||
// to allow the trailing dedup bytes to be
|
||||
// edited... but then 0 byte is "in general" allowed
|
||||
// on input (but not in UTF8).
|
||||
LevenshteinAutomata lev = new LevenshteinAutomata(ints, unicodeAware ? Character.MAX_CODE_POINT : 255, transpositions);
|
||||
subs.add(lev.toAutomaton(maxEdits, UnicodeUtil.newString(string.ints, string.offset, nonFuzzyPrefix)));
|
||||
}
|
||||
}
|
||||
|
||||
if (subs.isEmpty()) {
|
||||
// automaton is empty, there is no accepted paths through it
|
||||
return Automata.makeEmpty(); // matches nothing
|
||||
} else if (subs.size() == 1) {
|
||||
// no synonyms or anything: just a single path through the tokenstream
|
||||
return subs.get(0);
|
||||
} else {
|
||||
// multiple paths: this is really scary! is it slow?
|
||||
// maybe we should not do this and throw UOE?
|
||||
Automaton a = Operations.union(subs);
|
||||
// TODO: we could call toLevenshteinAutomata() before det?
|
||||
// this only happens if you have multiple paths anyway (e.g. synonyms)
|
||||
return Operations.determinize(a, DEFAULT_MAX_DETERMINIZED_STATES);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -19,14 +19,18 @@
|
|||
|
||||
package org.elasticsearch;
|
||||
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Properties;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.jar.JarInputStream;
|
||||
import java.util.jar.Manifest;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -35,65 +39,69 @@ public class Build {
|
|||
public static final Build CURRENT;
|
||||
|
||||
static {
|
||||
String hash = "NA";
|
||||
String hashShort = "NA";
|
||||
String timestamp = "NA";
|
||||
final String shortHash;
|
||||
final String date;
|
||||
|
||||
try (InputStream is = Build.class.getResourceAsStream("/es-build.properties")){
|
||||
Properties props = new Properties();
|
||||
props.load(is);
|
||||
hash = props.getProperty("hash", hash);
|
||||
if (!hash.equals("NA")) {
|
||||
hashShort = hash.substring(0, 7);
|
||||
Path path = getElasticsearchCodebase();
|
||||
if (path.toString().endsWith(".jar")) {
|
||||
try (JarInputStream jar = new JarInputStream(Files.newInputStream(path))) {
|
||||
Manifest manifest = jar.getManifest();
|
||||
shortHash = manifest.getMainAttributes().getValue("Change");
|
||||
date = manifest.getMainAttributes().getValue("Build-Date");
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
String gitTimestampRaw = props.getProperty("timestamp");
|
||||
if (gitTimestampRaw != null) {
|
||||
timestamp = ISODateTimeFormat.dateTimeNoMillis().withZone(DateTimeZone.UTC).print(Long.parseLong(gitTimestampRaw));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// just ignore...
|
||||
} else {
|
||||
// not running from a jar (unit tests, IDE)
|
||||
shortHash = "Unknown";
|
||||
date = "Unknown";
|
||||
}
|
||||
|
||||
CURRENT = new Build(hash, hashShort, timestamp);
|
||||
CURRENT = new Build(shortHash, date);
|
||||
}
|
||||
|
||||
private String hash;
|
||||
private String hashShort;
|
||||
private String timestamp;
|
||||
|
||||
Build(String hash, String hashShort, String timestamp) {
|
||||
this.hash = hash;
|
||||
this.hashShort = hashShort;
|
||||
this.timestamp = timestamp;
|
||||
/**
|
||||
* Returns path to elasticsearch codebase path
|
||||
*/
|
||||
@SuppressForbidden(reason = "looks up path of elasticsearch.jar directly")
|
||||
static Path getElasticsearchCodebase() {
|
||||
URL url = Build.class.getProtectionDomain().getCodeSource().getLocation();
|
||||
try {
|
||||
return PathUtils.get(url.toURI());
|
||||
} catch (URISyntaxException bogus) {
|
||||
throw new RuntimeException(bogus);
|
||||
}
|
||||
}
|
||||
|
||||
public String hash() {
|
||||
return hash;
|
||||
private String shortHash;
|
||||
private String date;
|
||||
|
||||
Build(String shortHash, String date) {
|
||||
this.shortHash = shortHash;
|
||||
this.date = date;
|
||||
}
|
||||
|
||||
public String hashShort() {
|
||||
return hashShort;
|
||||
public String shortHash() {
|
||||
return shortHash;
|
||||
}
|
||||
|
||||
public String timestamp() {
|
||||
return timestamp;
|
||||
public String date() {
|
||||
return date;
|
||||
}
|
||||
|
||||
public static Build readBuild(StreamInput in) throws IOException {
|
||||
String hash = in.readString();
|
||||
String hashShort = in.readString();
|
||||
String timestamp = in.readString();
|
||||
return new Build(hash, hashShort, timestamp);
|
||||
String date = in.readString();
|
||||
return new Build(hash, date);
|
||||
}
|
||||
|
||||
public static void writeBuild(Build build, StreamOutput out) throws IOException {
|
||||
out.writeString(build.hash());
|
||||
out.writeString(build.hashShort());
|
||||
out.writeString(build.timestamp());
|
||||
out.writeString(build.shortHash());
|
||||
out.writeString(build.date());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" + hash + "][" + timestamp + "]";
|
||||
return "[" + shortHash + "][" + date + "]";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -661,7 +661,7 @@ public class Version {
|
|||
|
||||
@SuppressForbidden(reason = "System.out.*")
|
||||
public static void main(String[] args) {
|
||||
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.hashShort() + "/" + Build.CURRENT.timestamp() + ", JVM: " + JvmInfo.jvmInfo().version());
|
||||
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: " + JvmInfo.jvmInfo().version());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -77,7 +77,7 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
         builder.field("ip", nodeInfo.getNode().getHostAddress(), XContentBuilder.FieldCaseConversion.NONE);

         builder.field("version", nodeInfo.getVersion());
-        builder.field("build", nodeInfo.getBuild().hashShort());
+        builder.field("build_hash", nodeInfo.getBuild().shortHash());

         if (nodeInfo.getServiceAttributes() != null) {
             for (Map.Entry<String, String> nodeAttribute : nodeInfo.getServiceAttributes().entrySet()) {
|
||||
|
|
|
@ -301,6 +301,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
public static class OsStats implements ToXContent, Streamable {
|
||||
|
||||
int availableProcessors;
|
||||
int allocatedProcessors;
|
||||
long availableMemory;
|
||||
final ObjectIntHashMap<String> names;
|
||||
|
||||
|
@ -310,6 +311,8 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
|
||||
public void addNodeInfo(NodeInfo nodeInfo) {
|
||||
availableProcessors += nodeInfo.getOs().getAvailableProcessors();
|
||||
allocatedProcessors += nodeInfo.getOs().getAllocatedProcessors();
|
||||
|
||||
if (nodeInfo.getOs().getName() != null) {
|
||||
names.addTo(nodeInfo.getOs().getName(), 1);
|
||||
}
|
||||
|
@ -319,6 +322,10 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
return availableProcessors;
|
||||
}
|
||||
|
||||
public int getAllocatedProcessors() {
|
||||
return allocatedProcessors;
|
||||
}
|
||||
|
||||
public ByteSizeValue getAvailableMemory() {
|
||||
return new ByteSizeValue(availableMemory);
|
||||
}
|
||||
|
@ -326,6 +333,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
availableProcessors = in.readVInt();
|
||||
allocatedProcessors = in.readVInt();
|
||||
availableMemory = in.readLong();
|
||||
int size = in.readVInt();
|
||||
names.clear();
|
||||
|
@ -337,6 +345,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(availableProcessors);
|
||||
out.writeVInt(allocatedProcessors);
|
||||
out.writeLong(availableMemory);
|
||||
out.writeVInt(names.size());
|
||||
for (ObjectIntCursor<String> name : names) {
|
||||
|
@ -353,6 +362,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
|
||||
static final class Fields {
|
||||
static final XContentBuilderString AVAILABLE_PROCESSORS = new XContentBuilderString("available_processors");
|
||||
static final XContentBuilderString ALLOCATED_PROCESSORS = new XContentBuilderString("allocated_processors");
|
||||
static final XContentBuilderString NAME = new XContentBuilderString("name");
|
||||
static final XContentBuilderString NAMES = new XContentBuilderString("names");
|
||||
static final XContentBuilderString MEM = new XContentBuilderString("mem");
|
||||
|
@ -364,6 +374,7 @@ public class ClusterStatsNodes implements ToXContent, Streamable {
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(Fields.AVAILABLE_PROCESSORS, availableProcessors);
|
||||
builder.field(Fields.ALLOCATED_PROCESSORS, allocatedProcessors);
|
||||
builder.startObject(Fields.MEM);
|
||||
builder.byteSizeField(Fields.TOTAL_IN_BYTES, Fields.TOTAL, availableMemory);
|
||||
builder.endObject();
|
||||
|
|
|
@@ -119,7 +119,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
         }
         if (field == null) {
             if (indexService != null) {
-                field = indexService.queryParserService().defaultField();
+                field = indexService.getIndexSettings().getDefaultField();
             } else {
                 field = AllFieldMapper.NAME;
             }
|
||||
|
|
|
@ -42,7 +42,7 @@ import org.elasticsearch.common.settings.Settings;
|
|||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
|
@ -162,8 +162,8 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||
@Override
|
||||
protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
|
||||
IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
|
||||
IndexQueryParserService queryParserService = indexService.queryParserService();
|
||||
IndexShard indexShard = indexService.getShard(request.shardId().id());
|
||||
final QueryShardContext queryShardContext = indexShard.getQueryShardContext();
|
||||
|
||||
boolean valid;
|
||||
String explanation = null;
|
||||
|
@ -178,7 +178,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
|
|||
);
|
||||
SearchContext.setCurrent(searchContext);
|
||||
try {
|
||||
searchContext.parsedQuery(queryParserService.toQuery(request.query()));
|
||||
searchContext.parsedQuery(queryShardContext.toQuery(request.query()));
|
||||
searchContext.preProcess();
|
||||
|
||||
valid = true;
|
||||
|
|
|
@ -121,7 +121,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
|
|||
SearchContext.setCurrent(context);
|
||||
|
||||
try {
|
||||
context.parsedQuery(indexService.queryParserService().toQuery(request.query()));
|
||||
context.parsedQuery(indexShard.getQueryShardContext().toQuery(request.query()));
|
||||
context.preProcess();
|
||||
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
|
||||
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
|
||||
|
|
|
@ -38,13 +38,11 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
|
|||
private final ShardSearchFailure[] shardFailures;
|
||||
|
||||
public SearchPhaseExecutionException(String phaseName, String msg, ShardSearchFailure[] shardFailures) {
|
||||
super(msg);
|
||||
this.phaseName = phaseName;
|
||||
this.shardFailures = shardFailures;
|
||||
this(phaseName, msg, null, shardFailures);
|
||||
}
|
||||
|
||||
public SearchPhaseExecutionException(String phaseName, String msg, Throwable cause, ShardSearchFailure[] shardFailures) {
|
||||
super(msg, cause);
|
||||
super(msg, deduplicateCause(cause, shardFailures));
|
||||
this.phaseName = phaseName;
|
||||
this.shardFailures = shardFailures;
|
||||
}
|
||||
|
@ -63,12 +61,26 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
|
|||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeOptionalString(phaseName);
|
||||
out.writeVInt(shardFailures == null ? 0 : shardFailures.length);
|
||||
if (shardFailures != null) {
|
||||
out.writeVInt(shardFailures.length);
|
||||
for (ShardSearchFailure failure : shardFailures) {
|
||||
failure.writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
private static final Throwable deduplicateCause(Throwable cause, ShardSearchFailure[] shardFailures) {
|
||||
if (shardFailures == null) {
|
||||
throw new IllegalArgumentException("shardSearchFailures must not be null");
|
||||
}
|
||||
// if the cause of this exception is also the cause of one of the shard failures we don't add it
|
||||
// to prevent duplication in stack traces rendered to the REST layer
|
||||
if (cause != null) {
|
||||
for (ShardSearchFailure failure : shardFailures) {
|
||||
failure.writeTo(out);
|
||||
if (failure.getCause() == cause) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
return cause;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -114,7 +114,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent
|
|||
|
||||
public void start() {
|
||||
if (scrollId.getContext().length == 0) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -175,7 +175,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent
|
|||
successfulOps.decrementAndGet();
|
||||
if (counter.decrementAndGet() == 0) {
|
||||
if (successfulOps.get() == 0) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query_fetch", "all shards failed", buildShardFailures()));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query_fetch", "all shards failed", t, buildShardFailures()));
|
||||
} else {
|
||||
finishHim();
|
||||
}
|
||||
|
|
|
@ -123,7 +123,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
|
|||
|
||||
public void start() {
|
||||
if (scrollId.getContext().length == 0) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY));
|
||||
return;
|
||||
}
|
||||
final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length);
|
||||
|
@ -143,7 +143,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
|
|||
try {
|
||||
executeFetchPhase();
|
||||
} catch (Throwable e) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, null));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -181,12 +181,12 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
|
|||
successfulOps.decrementAndGet();
|
||||
if (counter.decrementAndGet() == 0) {
|
||||
if (successfulOps.get() == 0) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", buildShardFailures()));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", t, buildShardFailures()));
|
||||
} else {
|
||||
try {
|
||||
executeFetchPhase();
|
||||
} catch (Throwable e) {
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, null));
|
||||
listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -220,17 +220,19 @@ public abstract class TransportSearchTypeAction extends TransportAction<SearchRe
|
|||
logger.trace("{}: Failed to execute [{}]", t, shard, request);
|
||||
}
|
||||
}
|
||||
final ShardSearchFailure[] shardSearchFailures = buildShardFailures();
|
||||
if (successfulOps.get() == 0) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("All shards failed for phase: [{}]", t, firstPhaseName());
|
||||
}
|
||||
|
||||
// no successful ops, raise an exception
|
||||
raiseEarlyFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", buildShardFailures()));
|
||||
raiseEarlyFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", t, shardSearchFailures));
|
||||
} else {
|
||||
try {
|
||||
innerMoveToSecondPhase();
|
||||
} catch (Throwable e) {
|
||||
raiseEarlyFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
|
||||
raiseEarlyFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, shardSearchFailures));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
|
|
@ -143,7 +143,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
|
|||
throw new IllegalArgumentException("suggest content missing");
|
||||
}
|
||||
final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
|
||||
indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id(), request);
|
||||
indexService.fieldData(), request.shardId().getIndex(), request.shardId().id(), request);
|
||||
final Suggest result = suggestPhase.execute(context, searcher.searcher());
|
||||
return new ShardSuggestResponse(request.shardId(), result);
|
||||
}
|
||||
|
|
|
@ -228,7 +228,13 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
|
|||
nodeIds = new HashMap<>();
|
||||
|
||||
for (ShardRouting shard : shardIt.asUnordered()) {
|
||||
if (shard.assignedToNode()) {
|
||||
// send a request to the shard only if it is assigned to a node that is in the local node's cluster state
|
||||
// a scenario in which a shard can be assigned but to a node that is not in the local node's cluster state
|
||||
// is when the shard is assigned to the master node, the local node has detected the master as failed
|
||||
// and a new master has not yet been elected; in this situation the local node will have removed the
|
||||
// master node from the local cluster state, but the shards assigned to the master will still be in the
|
||||
// routing table as such
|
||||
if (shard.assignedToNode() && nodes.get(shard.currentNodeId()) != null) {
|
||||
String nodeId = shard.currentNodeId();
|
||||
if (!nodeIds.containsKey(nodeId)) {
|
||||
nodeIds.put(nodeId, new ArrayList<>());
|
||||
|
|
|
@@ -235,7 +235,7 @@ final class Bootstrap {
             CliTool.ExitStatus status = bootstrapCLIParser.execute(args);

             if (CliTool.ExitStatus.OK != status) {
-                System.exit(status.status());
+                exit(status.status());
             }

             INSTANCE = new Bootstrap();

@@ -343,7 +343,12 @@ final class Bootstrap {
         if (confFileSetting != null && confFileSetting.isEmpty() == false) {
             ESLogger logger = Loggers.getLogger(Bootstrap.class);
             logger.info("{} is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed.", settingName);
-            System.exit(1);
+            exit(1);
         }
     }

+    @SuppressForbidden(reason = "Allowed to exit explicitly in bootstrap phase")
+    private static void exit(int status) {
+        System.exit(status);
+    }
 }
|
||||
|
|
|
@@ -81,7 +81,7 @@ final class BootstrapCLIParser extends CliTool {

         @Override
         public ExitStatus execute(Settings settings, Environment env) throws Exception {
-            terminal.println("Version: %s, Build: %s/%s, JVM: %s", org.elasticsearch.Version.CURRENT, Build.CURRENT.hashShort(), Build.CURRENT.timestamp(), JvmInfo.jvmInfo().version());
+            terminal.println("Version: %s, Build: %s/%s, JVM: %s", org.elasticsearch.Version.CURRENT, Build.CURRENT.shortHash(), Build.CURRENT.date(), JvmInfo.jvmInfo().version());
             return ExitStatus.OK_AND_EXIT;
         }
     }
|
||||
|
|
|
@ -25,6 +25,7 @@ import java.net.URL;
|
|||
import java.security.CodeSource;
|
||||
import java.security.Permission;
|
||||
import java.security.PermissionCollection;
|
||||
import java.security.Permissions;
|
||||
import java.security.Policy;
|
||||
import java.security.ProtectionDomain;
|
||||
import java.util.Map;
|
||||
|
@ -89,6 +90,21 @@ final class ESPolicy extends Policy {
|
|||
return template.implies(domain, permission) || dynamic.implies(permission);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PermissionCollection getPermissions(CodeSource codesource) {
|
||||
// code should not rely on this method, or at least use it correctly:
|
||||
// https://bugs.openjdk.java.net/browse/JDK-8014008
|
||||
// return them a new empty permissions object so jvisualvm etc work
|
||||
for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
|
||||
if ("sun.rmi.server.LoaderHandler".equals(element.getClassName()) &&
|
||||
"loadClass".equals(element.getMethodName())) {
|
||||
return new Permissions();
|
||||
}
|
||||
}
|
||||
// return UNSUPPORTED_EMPTY_COLLECTION since it is safe.
|
||||
return super.getPermissions(codesource);
|
||||
}
|
||||
|
||||
/**
|
||||
* Classy puzzler to rethrow any checked exception as an unchecked one.
|
||||
*/
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.bootstrap;
|
||||
|
||||
import com.sun.jna.Native;
|
||||
import com.sun.jna.NativeLong;
|
||||
import com.sun.jna.Structure;
|
||||
|
||||
import org.apache.lucene.util.Constants;
|
||||
|
@ -55,8 +56,8 @@ final class JNACLibrary {
|
|||
|
||||
/** corresponds to struct rlimit */
|
||||
public static final class Rlimit extends Structure implements Structure.ByReference {
|
||||
public long rlim_cur = 0;
|
||||
public long rlim_max = 0;
|
||||
public NativeLong rlim_cur = new NativeLong(0);
|
||||
public NativeLong rlim_max = new NativeLong(0);
|
||||
|
||||
@Override
|
||||
protected List<String> getFieldOrder() {
|
||||
|
|
|
@ -71,8 +71,8 @@ class JNANatives {
|
|||
JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
|
||||
if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_MEMLOCK, rlimit) == 0) {
|
||||
rlimitSuccess = true;
|
||||
softLimit = rlimit.rlim_cur;
|
||||
hardLimit = rlimit.rlim_max;
|
||||
softLimit = rlimit.rlim_cur.longValue();
|
||||
hardLimit = rlimit.rlim_max.longValue();
|
||||
} else {
|
||||
logger.warn("Unable to retrieve resource limits: " + JNACLibrary.strerror(Native.getLastError()));
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.bootstrap;
|
|||
import com.sun.jna.Library;
|
||||
import com.sun.jna.Memory;
|
||||
import com.sun.jna.Native;
|
||||
import com.sun.jna.NativeLong;
|
||||
import com.sun.jna.Pointer;
|
||||
import com.sun.jna.Structure;
|
||||
import com.sun.jna.ptr.PointerByReference;
|
||||
|
@ -38,7 +39,9 @@ import java.nio.file.Files;
|
|||
import java.nio.file.Path;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Installs a limited form of secure computing mode,
|
||||
|
@ -46,7 +49,7 @@ import java.util.List;
|
|||
* <p>
|
||||
* This is only supported on the Linux, Solaris, and Mac OS X operating systems.
|
||||
* <p>
|
||||
* On Linux it currently supports on the amd64 architecture, on Linux kernels 3.5 or above, and requires
|
||||
* On Linux it currently supports amd64 and i386 architectures, requires Linux kernel 3.5 or above, and requires
|
||||
* {@code CONFIG_SECCOMP} and {@code CONFIG_SECCOMP_FILTER} compiled into the kernel.
|
||||
* <p>
|
||||
* On Linux BPF Filters are installed using either {@code seccomp(2)} (3.17+) or {@code prctl(2)} (3.5+). {@code seccomp(2)}
|
||||
|
@ -95,12 +98,12 @@ final class Seccomp {
|
|||
/**
|
||||
* maps to prctl(2)
|
||||
*/
|
||||
int prctl(int option, long arg2, long arg3, long arg4, long arg5);
|
||||
int prctl(int option, NativeLong arg2, NativeLong arg3, NativeLong arg4, NativeLong arg5);
|
||||
/**
|
||||
* used to call seccomp(2), its too new...
|
||||
* this is the only way, DONT use it on some other architecture unless you know wtf you are doing
|
||||
*/
|
||||
long syscall(long number, Object... args);
|
||||
NativeLong syscall(NativeLong number, Object... args);
|
||||
};
|
||||
|
||||
// null if unavailable or something goes wrong.
|
||||
|
@ -119,7 +122,6 @@ final class Seccomp {
|
|||
}
|
||||
|
||||
/** the preferred method is seccomp(2), since we can apply to all threads of the process */
|
||||
static final int SECCOMP_SYSCALL_NR = 317; // since Linux 3.17
|
||||
static final int SECCOMP_SET_MODE_FILTER = 1; // since Linux 3.17
|
||||
static final int SECCOMP_FILTER_FLAG_TSYNC = 1; // since Linux 3.17
|
||||
|
||||
|
@ -190,7 +192,6 @@ final class Seccomp {
|
|||
return new SockFilter((short) code, (byte) jt, (byte) jf, k);
|
||||
}
|
||||
|
||||
static final int AUDIT_ARCH_X86_64 = 0xC000003E;
|
||||
static final int SECCOMP_RET_ERRNO = 0x00050000;
|
||||
static final int SECCOMP_RET_DATA = 0x0000FFFF;
|
||||
static final int SECCOMP_RET_ALLOW = 0x7FFF0000;
|
||||
|
@ -201,29 +202,63 @@ final class Seccomp {
|
|||
static final int EINVAL = 0x16;
|
||||
static final int ENOSYS = 0x26;
|
||||
|
||||
// offsets (arch dependent) that our BPF checks
|
||||
// offsets that our BPF checks
|
||||
// check with offsetof() when adding a new arch, move to Arch if different.
|
||||
static final int SECCOMP_DATA_NR_OFFSET = 0x00;
|
||||
static final int SECCOMP_DATA_ARCH_OFFSET = 0x04;
|
||||
|
||||
// currently these ranges are blocked (inclusive):
|
||||
// execve is really the only one needed but why let someone fork a 30G heap? (not really what happens)
|
||||
// ...
|
||||
// 57: fork
|
||||
// 58: vfork
|
||||
// 59: execve
|
||||
// ...
|
||||
// 322: execveat
|
||||
// ...
|
||||
static final int NR_SYSCALL_FORK = 57;
|
||||
static final int NR_SYSCALL_EXECVE = 59;
|
||||
static final int NR_SYSCALL_EXECVEAT = 322; // since Linux 3.19
|
||||
static final int NR_SYSCALL_TUXCALL = 184; // should return ENOSYS
|
||||
|
||||
static class Arch {
|
||||
/** AUDIT_ARCH_XXX constant from linux/audit.h */
|
||||
final int audit;
|
||||
/** syscall limit (necessary for blacklisting on amd64, to ban 32-bit syscalls) */
|
||||
final int limit;
|
||||
/** __NR_fork */
|
||||
final int fork;
|
||||
/** __NR_vfork */
|
||||
final int vfork;
|
||||
/** __NR_execve */
|
||||
final int execve;
|
||||
/** __NR_execveat */
|
||||
final int execveat;
|
||||
/** __NR_seccomp */
|
||||
final int seccomp;
|
||||
|
||||
Arch(int audit, int limit, int fork, int vfork, int execve, int execveat, int seccomp) {
|
||||
this.audit = audit;
|
||||
this.limit = limit;
|
||||
this.fork = fork;
|
||||
this.vfork = vfork;
|
||||
this.execve = execve;
|
||||
this.execveat = execveat;
|
||||
this.seccomp = seccomp;
|
||||
}
|
||||
}
|
||||
|
||||
/** supported architectures map keyed by os.arch */
private static final Map<String,Arch> ARCHITECTURES;
static {
Map<String,Arch> m = new HashMap<>();
m.put("amd64", new Arch(0xC000003E, 0x3FFFFFFF, 57, 58, 59, 322, 317));
m.put("i386", new Arch(0x40000003, 0xFFFFFFFF, 2, 190, 11, 358, 354));
ARCHITECTURES = Collections.unmodifiableMap(m);
}

/** invokes prctl() from linux libc library */
private static int linux_prctl(int option, long arg2, long arg3, long arg4, long arg5) {
return linux_libc.prctl(option, new NativeLong(arg2), new NativeLong(arg3), new NativeLong(arg4), new NativeLong(arg5));
}

/** invokes syscall() from linux libc library */
private static long linux_syscall(long number, Object... args) {
return linux_libc.syscall(new NativeLong(number), args).longValue();
}

/** try to install our BPF filters via seccomp() or prctl() to block execution */
private static int linuxImpl() {
// first be defensive: we can give nice errors this way, at the very least.
// also, some of these security features get backported to old versions, checking kernel version here is a big no-no!
boolean supported = Constants.LINUX && "amd64".equals(Constants.OS_ARCH);
final Arch arch = ARCHITECTURES.get(Constants.OS_ARCH);
boolean supported = Constants.LINUX && arch != null;
if (supported == false) {
throw new UnsupportedOperationException("seccomp unavailable: '" + Constants.OS_ARCH + "' architecture unsupported");
}

@@ -237,7 +272,7 @@ final class Seccomp {
// check that unimplemented syscalls actually return ENOSYS
// you never know (e.g. https://code.google.com/p/chromium/issues/detail?id=439795)
if (linux_libc.syscall(NR_SYSCALL_TUXCALL) >= 0 || Native.getLastError() != ENOSYS) {
if (linux_syscall(999) >= 0 || Native.getLastError() != ENOSYS) {
throw new UnsupportedOperationException("seccomp unavailable: your kernel is buggy and you should upgrade");
}

@@ -246,7 +281,7 @@ final class Seccomp {
final int bogusArg = 0xf7a46a5c;

// test seccomp(BOGUS)
long ret = linux_libc.syscall(SECCOMP_SYSCALL_NR, bogusArg);
long ret = linux_syscall(arch.seccomp, bogusArg);
if (ret != -1) {
throw new UnsupportedOperationException("seccomp unavailable: seccomp(BOGUS_OPERATION) returned " + ret);
} else {

@@ -259,7 +294,7 @@ final class Seccomp {
}

// test seccomp(VALID, BOGUS)
ret = linux_libc.syscall(SECCOMP_SYSCALL_NR, SECCOMP_SET_MODE_FILTER, bogusArg);
ret = linux_syscall(arch.seccomp, SECCOMP_SET_MODE_FILTER, bogusArg);
if (ret != -1) {
throw new UnsupportedOperationException("seccomp unavailable: seccomp(SECCOMP_SET_MODE_FILTER, BOGUS_FLAG) returned " + ret);
} else {

@@ -272,7 +307,7 @@ final class Seccomp {
}

// test prctl(BOGUS)
ret = linux_libc.prctl(bogusArg, 0, 0, 0, 0);
ret = linux_prctl(bogusArg, 0, 0, 0, 0);
if (ret != -1) {
throw new UnsupportedOperationException("seccomp unavailable: prctl(BOGUS_OPTION) returned " + ret);
} else {

@@ -287,7 +322,7 @@ final class Seccomp {
// now just normal defensive checks

// check for GET_NO_NEW_PRIVS
switch (linux_libc.prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0)) {
switch (linux_prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0)) {
case 0: break; // not yet set
case 1: break; // already set by caller
default:

@@ -299,7 +334,7 @@ final class Seccomp {
}
}
// check for SECCOMP
switch (linux_libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0)) {
switch (linux_prctl(PR_GET_SECCOMP, 0, 0, 0, 0)) {
case 0: break; // not yet set
case 2: break; // already in filter mode by caller
default:

@@ -311,7 +346,7 @@ final class Seccomp {
}
}
// check for SECCOMP_MODE_FILTER
if (linux_libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0, 0, 0) != 0) {
if (linux_prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0, 0, 0) != 0) {
int errno = Native.getLastError();
switch (errno) {
case EFAULT: break; // available

@@ -321,27 +356,28 @@ final class Seccomp {
}

// ok, now set PR_SET_NO_NEW_PRIVS, needed to be able to set a seccomp filter as ordinary user
if (linux_libc.prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) != 0) {
if (linux_prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0) != 0) {
throw new UnsupportedOperationException("prctl(PR_SET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError()));
}

// check it worked
if (linux_libc.prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) != 1) {
if (linux_prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0) != 1) {
throw new UnsupportedOperationException("seccomp filter did not really succeed: prctl(PR_GET_NO_NEW_PRIVS): " + JNACLibrary.strerror(Native.getLastError()));
}

// BPF installed to check arch, then syscall range. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details.
// BPF installed to check arch, limit, then syscall. See https://www.kernel.org/doc/Documentation/prctl/seccomp_filter.txt for details.
SockFilter insns[] = {
/* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), //
/* 2 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, AUDIT_ARCH_X86_64, 0, 4), // if (arch != amd64) goto fail;
/* 3 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_NR_OFFSET), //
/* 4 */ BPF_JUMP(BPF_JMP + BPF_JGE + BPF_K, NR_SYSCALL_FORK, 0, 3), // if (syscall < SYSCALL_FORK) goto pass;
/* 5 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, NR_SYSCALL_EXECVEAT, 1, 0), // if (syscall == SYSCALL_EXECVEAT) goto fail;
/* 6 */ BPF_JUMP(BPF_JMP + BPF_JGT + BPF_K, NR_SYSCALL_EXECVE, 1, 0), // if (syscall > SYSCALL_EXECVE) goto pass;
/* 7 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ERRNO | (EACCES & SECCOMP_RET_DATA)), // fail: return EACCES;
/* 8 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ALLOW) // pass: return OK;
/* 1 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_ARCH_OFFSET), //
/* 2 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, arch.audit, 0, 7), // if (arch != audit) goto fail;
/* 3 */ BPF_STMT(BPF_LD + BPF_W + BPF_ABS, SECCOMP_DATA_NR_OFFSET), //
/* 4 */ BPF_JUMP(BPF_JMP + BPF_JGT + BPF_K, arch.limit, 5, 0), // if (syscall > LIMIT) goto fail;
/* 5 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, arch.fork, 4, 0), // if (syscall == FORK) goto fail;
/* 6 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, arch.vfork, 3, 0), // if (syscall == VFORK) goto fail;
/* 7 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, arch.execve, 2, 0), // if (syscall == EXECVE) goto fail;
/* 8 */ BPF_JUMP(BPF_JMP + BPF_JEQ + BPF_K, arch.execveat, 1, 0), // if (syscall == EXECVEAT) goto fail;
/* 9 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ALLOW), // pass: return OK;
/* 10 */ BPF_STMT(BPF_RET + BPF_K, SECCOMP_RET_ERRNO | (EACCES & SECCOMP_RET_DATA)), // fail: return EACCES;
};

// seccomp takes a long, so we pass it one explicitly to keep the JNA simple
SockFProg prog = new SockFProg(insns);
prog.write();

@@ -350,13 +386,13 @@ final class Seccomp {
int method = 1;
// install filter, if this works, after this there is no going back!
// first try it with seccomp(SECCOMP_SET_MODE_FILTER), falling back to prctl()
if (linux_libc.syscall(SECCOMP_SYSCALL_NR, SECCOMP_SET_MODE_FILTER, SECCOMP_FILTER_FLAG_TSYNC, pointer) != 0) {
if (linux_syscall(arch.seccomp, SECCOMP_SET_MODE_FILTER, SECCOMP_FILTER_FLAG_TSYNC, new NativeLong(pointer)) != 0) {
method = 0;
int errno1 = Native.getLastError();
if (logger.isDebugEnabled()) {
logger.debug("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) + ", falling back to prctl(PR_SET_SECCOMP)...");
}
if (linux_libc.prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) != 0) {
if (linux_prctl(PR_SET_SECCOMP, SECCOMP_MODE_FILTER, pointer, 0, 0) != 0) {
int errno2 = Native.getLastError();
throw new UnsupportedOperationException("seccomp(SECCOMP_SET_MODE_FILTER): " + JNACLibrary.strerror(errno1) +
", prctl(PR_SET_SECCOMP): " + JNACLibrary.strerror(errno2));

@@ -364,7 +400,7 @@ final class Seccomp {
}

// now check that the filter was really installed, we should be in filter mode.
if (linux_libc.prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) {
if (linux_prctl(PR_GET_SECCOMP, 0, 0, 0, 0) != 2) {
throw new UnsupportedOperationException("seccomp filter installation did not really succeed. seccomp(PR_GET_SECCOMP): " + JNACLibrary.strerror(Native.getLastError()));
}
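The arch table above replaces the amd64-only constants with per-architecture values. A minimal standalone sketch of the same lookup in plain Java, without JNA; the Arch field order follows the constructor calls and BPF comments above and is an assumption, since the Arch class body is outside this excerpt:

---------------------------------------------
// Sketch of the arch-keyed lookup: seccomp constants per os.arch value.
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ArchTableSketch {
    /** seccomp-relevant constants for one architecture */
    static final class Arch {
        final int audit;    // AUDIT_ARCH_* value checked by the BPF filter
        final int limit;    // highest syscall number allowed through the range check
        final int fork, vfork, execve, execveat; // syscalls the filter denies
        final int seccomp;  // syscall number of seccomp(2) itself
        Arch(int audit, int limit, int fork, int vfork, int execve, int execveat, int seccomp) {
            this.audit = audit; this.limit = limit; this.fork = fork; this.vfork = vfork;
            this.execve = execve; this.execveat = execveat; this.seccomp = seccomp;
        }
    }

    static final Map<String, Arch> ARCHITECTURES;
    static {
        Map<String, Arch> m = new HashMap<>();
        m.put("amd64", new Arch(0xC000003E, 0x3FFFFFFF, 57, 58, 59, 322, 317));
        m.put("i386",  new Arch(0x40000003, 0xFFFFFFFF, 2, 190, 11, 358, 354));
        ARCHITECTURES = Collections.unmodifiableMap(m);
    }

    public static void main(String[] args) {
        Arch arch = ARCHITECTURES.get(System.getProperty("os.arch"));
        System.out.println(arch == null
                ? "architecture unsupported"
                : "syscall limit=0x" + Integer.toHexString(arch.limit));
    }
}
---------------------------------------------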
@@ -20,12 +20,17 @@
package org.elasticsearch.bootstrap;

import org.elasticsearch.SecureSM;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.http.netty.NettyHttpServerTransport;
import org.elasticsearch.plugins.PluginInfo;
import org.elasticsearch.transport.netty.NettyTransport;

import java.io.*;
import java.net.SocketPermission;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.AccessMode;

@@ -184,9 +189,18 @@ final class Security {
}
}

/** returns dynamic Permissions to configured paths */
/** returns dynamic Permissions to configured paths and bind ports */
static Permissions createPermissions(Environment environment) throws IOException {
Permissions policy = new Permissions();
addFilePermissions(policy, environment);
addBindPermissions(policy, environment.settings());
return policy;
}

/**
* Adds access to all configurable paths.
*/
static void addFilePermissions(Permissions policy, Environment environment) {
// read-only dirs
addPath(policy, "path.home", environment.binFile(), "read,readlink");
addPath(policy, "path.home", environment.libFile(), "read,readlink");

@@ -212,7 +226,36 @@ final class Security {
// we just need permission to remove the file if its elsewhere.
policy.add(new FilePermission(environment.pidFile().toString(), "delete"));
}
return policy;
}

static void addBindPermissions(Permissions policy, Settings settings) throws IOException {
// http is simple
String httpRange = settings.get("http.netty.port",
settings.get("http.port",
NettyHttpServerTransport.DEFAULT_PORT_RANGE));
policy.add(new SocketPermission("localhost:" + httpRange, "listen,resolve"));
// transport is waaaay overengineered
Map<String, Settings> profiles = settings.getGroups("transport.profiles", true);
if (!profiles.containsKey(NettyTransport.DEFAULT_PROFILE)) {
profiles = new HashMap<>(profiles);
profiles.put(NettyTransport.DEFAULT_PROFILE, Settings.EMPTY);
}

// loop through all profiles and add permissions for each one, if its valid.
// (otherwise NettyTransport is lenient and ignores it)
for (Map.Entry<String, Settings> entry : profiles.entrySet()) {
Settings profileSettings = entry.getValue();
String name = entry.getKey();
String transportRange = profileSettings.get("port",
settings.get("transport.tcp.port",
NettyTransport.DEFAULT_PORT_RANGE));

// a profile is only valid if its the default profile, or if it has an actual name and specifies a port
boolean valid = NettyTransport.DEFAULT_PROFILE.equals(name) || (Strings.hasLength(name) && profileSettings.get("port") != null);
if (valid) {
policy.add(new SocketPermission("localhost:" + transportRange, "listen,resolve"));
}
}
}

/**
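addBindPermissions grants listen/resolve on whole port ranges rather than single ports. A small self-contained sketch of that grant with plain JDK security classes; the ranges below are the usual 9200-9300 and 9300-9400 defaults, used here only for illustration:

---------------------------------------------
// Sketch of granting listen/resolve on a port range, as addBindPermissions does
// for the http and transport port settings (range syntax is standard java.policy).
import java.net.SocketPermission;
import java.security.Permissions;

public class BindPermissionsSketch {
    public static void main(String[] args) {
        Permissions policy = new Permissions();
        String httpRange = "9200-9300";       // e.g. resolved from http.netty.port / http.port
        String transportRange = "9300-9400";  // e.g. resolved from transport.tcp.port
        policy.add(new SocketPermission("localhost:" + httpRange, "listen,resolve"));
        policy.add(new SocketPermission("localhost:" + transportRange, "listen,resolve"));
        // any port inside the range is now implied
        System.out.println(policy.implies(new SocketPermission("localhost:9201", "listen"))); // true
    }
}
---------------------------------------------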
@@ -177,7 +177,7 @@ public class ClusterModule extends AbstractModule {
registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE);
registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE);
registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE);
registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", Validator.EMPTY);
registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR);
registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER);
registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER);
registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, Validator.EMPTY);
@@ -21,7 +21,6 @@ package org.elasticsearch.cluster;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.cluster.DiffableUtils.KeyedReader;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;

@@ -31,12 +30,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.InternalClusterService;
import org.elasticsearch.common.Nullable;

@@ -57,11 +51,7 @@ import org.elasticsearch.discovery.local.LocalDiscovery;
import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;

import java.io.IOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.*;

/**
* Represents the current state of the cluster.

@@ -283,6 +273,7 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
sb.append("state uuid: ").append(stateUUID).append("\n");
sb.append("from_diff: ").append(wasReadFromDiff).append("\n");
sb.append("meta data version: ").append(metaData.version()).append("\n");
sb.append(blocks().prettyPrint());
sb.append(nodes().prettyPrint());
sb.append(routingTable().prettyPrint());
sb.append(getRoutingNodes().prettyPrint());
@@ -155,8 +155,10 @@ public class ClusterBlock implements Streamable, ToXContent {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(id).append(",").append(description).append(", blocks ");
String delimiter = "";
for (ClusterBlockLevel level : levels) {
sb.append(level.name()).append(",");
sb.append(delimiter).append(level.name());
delimiter = ",";
}
return sb.toString();
}
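The toString() change swaps a trailing-comma append for a leading-delimiter pattern. A tiny sketch of that pattern next to the equivalent String.join call; the block level names here are illustrative:

---------------------------------------------
// The delimiter trick avoids a trailing comma; with plain JDK types the same
// result can also be had via String.join.
import java.util.Arrays;
import java.util.List;

public class DelimiterSketch {
    public static void main(String[] args) {
        List<String> levels = Arrays.asList("read", "write", "metadata_write");
        StringBuilder sb = new StringBuilder("4,index closed, blocks ");
        String delimiter = "";
        for (String level : levels) {
            sb.append(delimiter).append(level);
            delimiter = ",";
        }
        System.out.println(sb);                                    // no trailing comma
        System.out.println("joined: " + String.join(",", levels)); // equivalent join
    }
}
---------------------------------------------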
@@ -20,7 +20,6 @@
package org.elasticsearch.cluster.block;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;

@@ -199,6 +198,28 @@ public class ClusterBlocks extends AbstractDiffable<ClusterBlocks> {
return new ClusterBlockException(unmodifiableSet(blocks.collect(toSet())));
}

public String prettyPrint() {
if (global.isEmpty() && indices().isEmpty()) {
return "";
}
StringBuilder sb = new StringBuilder();
sb.append("blocks: \n");
if (global.isEmpty() == false) {
sb.append(" _global_:\n");
for (ClusterBlock block : global) {
sb.append(" ").append(block);
}
}
for (ObjectObjectCursor<String, Set<ClusterBlock>> entry : indices()) {
sb.append(" ").append(entry.key).append(":\n");
for (ClusterBlock block : entry.value) {
sb.append(" ").append(block);
}
}
sb.append("\n");
return sb.toString();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
writeBlockSet(global, out);
@@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.InvalidAliasNameException;

@@ -113,14 +112,14 @@ public class AliasValidator extends AbstractComponent {

/**
* Validates an alias filter by parsing it using the
* provided {@link org.elasticsearch.index.query.IndexQueryParserService}
* provided {@link org.elasticsearch.index.query.QueryShardContext}
* @throws IllegalArgumentException if the filter is not valid
*/
public void validateAliasFilter(String alias, String filter, IndexQueryParserService indexQueryParserService) {
assert indexQueryParserService != null;
public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) {
assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
validateAliasFilter(parser, indexQueryParserService);
validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
}

@@ -128,26 +127,25 @@ public class AliasValidator extends AbstractComponent {

/**
* Validates an alias filter by parsing it using the
* provided {@link org.elasticsearch.index.query.IndexQueryParserService}
* provided {@link org.elasticsearch.index.query.QueryShardContext}
* @throws IllegalArgumentException if the filter is not valid
*/
public void validateAliasFilter(String alias, byte[] filter, IndexQueryParserService indexQueryParserService) {
assert indexQueryParserService != null;
public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) {
assert queryShardContext != null;
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
validateAliasFilter(parser, indexQueryParserService);
validateAliasFilter(parser, queryShardContext);
} catch (Throwable e) {
throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
}
}

private void validateAliasFilter(XContentParser parser, IndexQueryParserService indexQueryParserService) throws IOException {
QueryShardContext context = indexQueryParserService.getShardContext();
private void validateAliasFilter(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
try {
context.reset(parser);
context.parseContext().parseInnerQueryBuilder().toFilter(context);
queryShardContext.reset(parser);
queryShardContext.parseContext().parseInnerQueryBuilder().toFilter(queryShardContext);
} finally {
context.reset(null);
queryShardContext.reset(null);
parser.close();
}
}
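The validateAliasFilter overloads keep the same shape: parse the filter and wrap any failure in an IllegalArgumentException that names the alias. A sketch of that pattern using plain Jackson (jackson-databind on the classpath) instead of the XContent and QueryShardContext machinery:

---------------------------------------------
// Sketch of the validate-and-wrap pattern: parse the alias filter, surface any
// parse failure as an IllegalArgumentException naming the alias.
import com.fasterxml.jackson.databind.ObjectMapper;

public class AliasFilterValidationSketch {
    static void validateAliasFilter(String alias, String filter) {
        try {
            new ObjectMapper().readTree(filter); // parse only; the result is discarded
        } catch (Exception e) {
            throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
        }
    }

    public static void main(String[] args) {
        validateAliasFilter("alias1", "{\"term\":{\"user\":\"kimchy\"}}"); // ok
        try {
            validateAliasFilter("alias2", "{not json");
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}
---------------------------------------------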
@@ -806,4 +806,17 @@ public class IndexNameExpressionResolver extends AbstractComponent {
}
}

/**
* Returns <code>true</code> iff the given expression resolves to the given index name otherwise <code>false</code>
*/
public final boolean matchesIndex(String indexName, String expression, ClusterState state) {
final String[] concreteIndices = concreteIndices(state, IndicesOptions.lenientExpandOpen(), expression);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, indexName)) {
return true;
}
}
return indexName.equals(expression);
}

}
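matchesIndex falls back to Regex.simpleMatch over the concrete indices. A simplified stand-in for that wildcard check, handling only a single '*', to show the intent:

---------------------------------------------
// Simplified stand-in for the '*' wildcard match used by matchesIndex.
public class SimpleMatchSketch {
    static boolean simpleMatch(String pattern, String value) {
        int star = pattern.indexOf('*');
        if (star == -1) {
            return pattern.equals(value); // no wildcard: exact match only
        }
        String prefix = pattern.substring(0, star);
        String suffix = pattern.substring(star + 1);
        return value.length() >= prefix.length() + suffix.length()
                && value.startsWith(prefix) && value.endsWith(suffix);
    }

    public static void main(String[] args) {
        System.out.println(simpleMatch("logs-*", "logs-2015-11-03")); // true
        System.out.println(simpleMatch("logs-*", "metrics-2015"));    // false
    }
}
---------------------------------------------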
@@ -26,7 +26,6 @@ import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;

@@ -41,48 +40,38 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexCreationException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE;

@@ -107,12 +96,14 @@ public class MetaDataCreateIndexService extends AbstractComponent {
private final AliasValidator aliasValidator;
private final IndexTemplateFilter indexTemplateFilter;
private final Environment env;
private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataCreateIndexService(Settings settings, ClusterService clusterService,
IndicesService indicesService, AllocationService allocationService,
Version version, AliasValidator aliasValidator,
Set<IndexTemplateFilter> indexTemplateFilters, Environment env) {
Set<IndexTemplateFilter> indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;

@@ -120,6 +111,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
this.version = version;
this.aliasValidator = aliasValidator;
this.env = env;
this.nodeServicesProvider = nodeServicesProvider;

if (indexTemplateFilters.isEmpty()) {
this.indexTemplateFilter = DEFAULT_INDEX_TEMPLATE_FILTER;

@@ -307,7 +299,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
// Set up everything, now locally create the index to see that things are ok, and apply
final IndexMetaData tmpImd = IndexMetaData.builder(request.index()).settings(actualIndexSettings).build();
// create the index here (on the master) to validate it can be created, as well as adding the mapping
indicesService.createIndex(tmpImd, Collections.EMPTY_LIST);
indicesService.createIndex(nodeServicesProvider, tmpImd, Collections.EMPTY_LIST);
indexCreated = true;
// now add the mappings
IndexService indexService = indicesService.indexServiceSafe(request.index());

@@ -334,15 +326,15 @@ public class MetaDataCreateIndexService extends AbstractComponent {
}
}

IndexQueryParserService indexQueryParserService = indexService.queryParserService();
QueryShardContext queryShardContext = indexService.getQueryShardContext();
for (Alias alias : request.aliases()) {
if (Strings.hasLength(alias.filter())) {
aliasValidator.validateAliasFilter(alias.name(), alias.filter(), indexQueryParserService);
aliasValidator.validateAliasFilter(alias.name(), alias.filter(), queryShardContext);
}
}
for (AliasMetaData aliasMetaData : templatesAliases.values()) {
if (aliasMetaData.filter() != null) {
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), indexQueryParserService);
aliasValidator.validateAliasFilter(aliasMetaData.alias(), aliasMetaData.filter().uncompressed(), queryShardContext);
}
}
}
@@ -33,6 +33,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.IndicesService;

@@ -49,12 +50,15 @@ public class MetaDataIndexAliasesService extends AbstractComponent {

private final AliasValidator aliasValidator;

private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService, AliasValidator aliasValidator) {
public MetaDataIndexAliasesService(Settings settings, ClusterService clusterService, IndicesService indicesService, AliasValidator aliasValidator, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;
this.aliasValidator = aliasValidator;
this.nodeServicesProvider = nodeServicesProvider;
}

public void indicesAliases(final IndicesAliasesClusterStateUpdateRequest request, final ActionListener<ClusterStateUpdateResponse> listener) {

@@ -95,7 +99,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
if (indexService == null) {
// temporarily create the index and add mappings so we can parse the filter
try {
indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) {
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false);
}

@@ -112,7 +116,7 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
indices.put(indexMetaData.getIndex(), indexService);
}

aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.queryParserService());
aliasValidator.validateAliasFilter(aliasAction.alias(), filter, indexService.getQueryShardContext());
}
AliasMetaData newAliasMd = AliasMetaData.newAliasMetaDataBuilder(
aliasAction.alias())
@@ -146,6 +146,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
"index.translog.flush_threshold_period",
"index.translog.interval",
"index.translog.sync_interval",
"index.shard.inactive_time",
UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING));

/**
@@ -35,6 +35,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeMappingException;

@@ -57,12 +58,14 @@ public class MetaDataMappingService extends AbstractComponent {
private final List<MappingTask> refreshOrUpdateQueue = new ArrayList<>();
private long refreshOrUpdateInsertOrder;
private long refreshOrUpdateProcessedInsertOrder;
private final NodeServicesProvider nodeServicesProvider;

@Inject
public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService) {
public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService, NodeServicesProvider nodeServicesProvider) {
super(settings);
this.clusterService = clusterService;
this.indicesService = indicesService;
this.nodeServicesProvider = nodeServicesProvider;
}

static class MappingTask {

@@ -172,7 +175,7 @@ public class MetaDataMappingService extends AbstractComponent {
IndexService indexService = indicesService.indexService(index);
if (indexService == null) {
// we need to create the index here, and add the current mapping to it, so we can merge
indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
removeIndex = true;
Set<String> typesToIntroduce = new HashSet<>();
for (MappingTask task : tasks) {

@@ -350,7 +353,7 @@ public class MetaDataMappingService extends AbstractComponent {
continue;
}
final IndexMetaData indexMetaData = currentState.metaData().index(index);
IndexService indexService = indicesService.createIndex(indexMetaData, Collections.EMPTY_LIST);
IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.EMPTY_LIST);
indicesToClose.add(indexMetaData.getIndex());
// make sure to add custom default mapping if exists
if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) {
@@ -20,23 +20,22 @@
package org.elasticsearch.common;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.IndexQueryParserService;

import java.util.EnumSet;

/**
* Matcher to use in combination with {@link ParseField} while parsing requests. Matches a {@link ParseField}
* against a field name and throw deprecation exception depending on the current value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
* against a field name and throw deprecation exception depending on the current value of the {@link #PARSE_STRICT} setting.
*/
public class ParseFieldMatcher {

public static final String PARSE_STRICT = "index.query.parse.strict";
public static final ParseFieldMatcher EMPTY = new ParseFieldMatcher(ParseField.EMPTY_FLAGS);
public static final ParseFieldMatcher STRICT = new ParseFieldMatcher(ParseField.STRICT_FLAGS);

private final EnumSet<ParseField.Flag> parseFlags;

public ParseFieldMatcher(Settings settings) {
if (settings.getAsBoolean(IndexQueryParserService.PARSE_STRICT, false)) {
if (settings.getAsBoolean(PARSE_STRICT, false)) {
this.parseFlags = EnumSet.of(ParseField.Flag.STRICT);
} else {
this.parseFlags = ParseField.EMPTY_FLAGS;

@@ -49,7 +48,7 @@ public class ParseFieldMatcher {

/**
* Matches a {@link ParseField} against a field name, and throws deprecation exception depending on the current
* value of the {@link IndexQueryParserService#PARSE_STRICT} setting.
* value of the {@link #PARSE_STRICT} setting.
* @param fieldName the field name found in the request while parsing
* @param parseField the parse field that we are looking for
* @throws IllegalArgumentException whenever we are in strict mode and the request contained a deprecated field
|
|||
throw new NullPointerException("iterators");
|
||||
}
|
||||
|
||||
return new ConcatenatedIterator<>(iterators);
|
||||
// explicit generic type argument needed for type inference
|
||||
return new ConcatenatedIterator<T>(iterators);
|
||||
}
|
||||
|
||||
static class ConcatenatedIterator<T> implements Iterator<T> {
|
||||
|
|
|
@@ -66,6 +66,19 @@ public class GeoUtils {
/** Earth ellipsoid polar distance in meters */
public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS;

/** Returns the maximum distance/radius from the point 'center' before overlapping */
public static double maxRadialDistance(GeoPoint center) {
return SloppyMath.haversin(center.lat(), center.lon(), center.lat(), (180.0 + center.lon()) % 360)*1000.0;
}

/** Returns the minimum between the provided distance 'initialRadius' and the
* maximum distance/radius from the point 'center' before overlapping
**/
public static double maxRadialDistance(GeoPoint center, double initialRadius) {
final double maxRadius = maxRadialDistance(center);
return Math.min(initialRadius, maxRadius);
}

/** Returns true if latitude is actually a valid latitude value.*/
public static boolean isValidLatitude(double latitude) {
if (Double.isNaN(latitude) || Double.isInfinite(latitude) || latitude < GeoUtils.MIN_LAT || latitude > GeoUtils.MAX_LAT) {
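maxRadialDistance measures the distance, at constant latitude, to the meridian opposite the center; SloppyMath.haversin in this Lucene version reports kilometers, hence the *1000.0 above. A self-contained worked example with an explicit haversine (mean earth radius approximation):

---------------------------------------------
// Worked example of the "max radial distance" idea: the farthest useful radius
// from a point is the haversine distance to (same latitude, lon + 180).
public class MaxRadialDistanceSketch {
    static double haversinMeters(double lat1, double lon1, double lat2, double lon2) {
        double r = 6_371_000; // mean earth radius in meters (approximation)
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                 + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                 * Math.sin(dLon / 2) * Math.sin(dLon / 2);
        return 2 * r * Math.asin(Math.sqrt(a));
    }

    static double maxRadialDistance(double lat, double lon) {
        return haversinMeters(lat, lon, lat, (180.0 + lon) % 360);
    }

    public static void main(String[] args) {
        // at the equator this is roughly half the earth's circumference (~2.0e7 m)
        System.out.printf("equator: %.0f m%n", maxRadialDistance(0.0, 0.0));
        // near the poles the same-latitude antipode is much closer (great circle over the pole)
        System.out.printf("lat 80:  %.0f m%n", maxRadialDistance(80.0, 0.0));
    }
}
---------------------------------------------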
@@ -358,7 +358,7 @@ public class HttpDownloadHelper {
connection.setConnectTimeout(5000);
}
connection.setRequestProperty("ES-Version", Version.CURRENT.toString());
connection.setRequestProperty("ES-Build-Hash", Build.CURRENT.hashShort());
connection.setRequestProperty("ES-Build-Hash", Build.CURRENT.shortHash());
connection.setRequestProperty("User-Agent", "elasticsearch-plugin-manager");

// connect to the remote site (may take some time)
@@ -231,7 +231,7 @@ public class TimeValue implements Streamable {

public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) {
settingName = Objects.requireNonNull(settingName);
assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName);
assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName;
if (sValue == null) {
return defaultValue;
}
@@ -214,8 +214,16 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa

private final <T> List<T> parseArray(XContentParser parser, IOSupplier<T> supplier) throws IOException {
List<T> list = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
list.add(supplier.get());
if (parser.currentToken().isValue()) {
list.add(supplier.get()); // single value
} else {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
if (parser.currentToken().isValue()) {
list.add(supplier.get());
} else {
throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]");
}
}
}
return list;
}

@@ -224,6 +232,19 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
declareField((p, v, c) -> consumer.accept(v, objectParser.apply(p, c)), field, ValueType.OBJECT);
}

public <T> void declareObjectOrDefault(BiConsumer<Value, T> consumer, BiFunction<XContentParser, Context, T> objectParser, Supplier<T> defaultValue, ParseField field) {
declareField((p, v, c) -> {
if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
if (p.booleanValue()) {
consumer.accept(v, defaultValue.get());
}
} else {
consumer.accept(v, objectParser.apply(p, c));
}
}, field, ValueType.OBJECT_OR_BOOLEAN);
}

public void declareFloat(BiConsumer<Value, Float> consumer, ParseField field) {
declareField((p, v, c) -> consumer.accept(v, p.floatValue()), field, ValueType.FLOAT);
}

@@ -240,6 +261,10 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
declareField((p, v, c) -> consumer.accept(v, p.intValue()), field, ValueType.INT);
}

public void declareValue(BiConsumer<Value, XContentParser> consumer, ParseField field) {
declareField((p, v, c) -> consumer.accept(v, p), field, ValueType.VALUE);
}

public void declareString(BiConsumer<Value, String> consumer, ParseField field) {
declareField((p, v, c) -> consumer.accept(v, p.text()), field, ValueType.STRING);
}

@@ -296,13 +321,15 @@ public final class ObjectParser<Value, Context> implements BiFunction<XContentPa
DOUBLE(EnumSet.of(XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
LONG(EnumSet.of(XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
INT(EnumSet.of(XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
BOOLEAN(EnumSet.of(XContentParser.Token.VALUE_BOOLEAN)), STRING_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
FLOAT_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
DOUBLE_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
LONG_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
INT_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
BOOLEAN_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY)),
OBJECT(EnumSet.of(XContentParser.Token.START_OBJECT));
BOOLEAN(EnumSet.of(XContentParser.Token.VALUE_BOOLEAN)), STRING_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_STRING)),
FLOAT_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
DOUBLE_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
LONG_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
INT_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_NUMBER, XContentParser.Token.VALUE_STRING)),
BOOLEAN_ARRAY(EnumSet.of(XContentParser.Token.START_ARRAY, XContentParser.Token.VALUE_BOOLEAN)),
OBJECT(EnumSet.of(XContentParser.Token.START_OBJECT)),
OBJECT_OR_BOOLEAN(EnumSet.of(XContentParser.Token.START_OBJECT, XContentParser.Token.VALUE_BOOLEAN)),
VALUE(EnumSet.of(XContentParser.Token.VALUE_BOOLEAN, XContentParser.Token.VALUE_NULL ,XContentParser.Token.VALUE_EMBEDDED_OBJECT,XContentParser.Token.VALUE_NUMBER,XContentParser.Token.VALUE_STRING));

private final EnumSet<XContentParser.Token> tokens;
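The parseArray change lets a field declared as an array also accept a bare scalar, while still rejecting nested non-values inside an array. A sketch of that leniency over plain Java objects instead of XContentParser tokens:

---------------------------------------------
// Sketch of the array leniency: a single scalar is wrapped as a one-element
// list; elements inside a real array must themselves be scalars.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ScalarOrArraySketch {
    static List<Object> parseArray(Object value) {
        if (!(value instanceof List)) {
            return Arrays.asList(value);          // single value: wrap it
        }
        List<Object> out = new ArrayList<>();
        for (Object element : (List<?>) value) {
            if (element instanceof List) {
                throw new IllegalStateException("expected value but got [" + element + "]");
            }
            out.add(element);
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(parseArray("single"));               // [single]
        System.out.println(parseArray(Arrays.asList(1, 2, 3))); // [1, 2, 3]
    }
}
---------------------------------------------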
@@ -25,6 +25,7 @@ import com.fasterxml.jackson.dataformat.smile.SmileConstants;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.cbor.CborXContent;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.smile.SmileXContent;

@@ -33,7 +34,6 @@ import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;

/**
* A one stop to use {@link org.elasticsearch.common.xcontent.XContent} and {@link XContentBuilder}.

@@ -216,65 +216,9 @@ public class XContentFactory {
}
si.mark(GUESS_HEADER_LENGTH);
try {
final int firstInt = si.read(); // this must be an int since we need to respect the method contract
if (firstInt == -1) {
return null;
}

final int secondInt = si.read(); // this must be an int since we need to respect the method contract
if (secondInt == -1) {
return null;
}
final byte first = (byte) (0xff & firstInt);
final byte second = (byte) (0xff & secondInt);
if (first == SmileConstants.HEADER_BYTE_1 && second == SmileConstants.HEADER_BYTE_2) {
int third = si.read();
if (third == SmileConstants.HEADER_BYTE_3) {
return XContentType.SMILE;
}
}
if (first == '{' || second == '{') {
return XContentType.JSON;
}
if (first == '-' && second == '-') {
int third = si.read();
if (third == '-') {
return XContentType.YAML;
}
}
// CBOR logic similar to CBORFactory#hasCBORFormat
if (first == CBORConstants.BYTE_OBJECT_INDEFINITE){
return XContentType.CBOR;
}
if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first)) {
// Actually, specific "self-describe tag" is a very good indicator
int third = si.read();
if (third == -1) {
return null;
}
if (first == (byte) 0xD9 && second == (byte) 0xD9 && third == (byte) 0xF7) {
return XContentType.CBOR;
}
}
// for small objects, some encoders just encode as major type object, we can safely
// say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort
if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) {
return XContentType.CBOR;
}

for (int i = 2; i < GUESS_HEADER_LENGTH; i++) {
int val = si.read();
if (val == -1) {
return null;
}
if (val == '{') {
return XContentType.JSON;
}
if (Character.isWhitespace(val) == false) {
break;
}
}
return null;
final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH];
final int read = Streams.readFully(si, firstBytes);
return xContentType(new BytesArray(firstBytes, 0, read));
} finally {
si.reset();
}

@@ -329,8 +273,14 @@ public class XContentFactory {
return XContentType.CBOR;
}

int jsonStart = 0;
// JSON may be preceded by UTF-8 BOM
if (length > 3 && first == (byte) 0xEF && bytes.get(1) == (byte) 0xBB && bytes.get(2) == (byte) 0xBF) {
jsonStart = 3;
}

// a last chance for JSON
for (int i = 0; i < length; i++) {
for (int i = jsonStart; i < length; i++) {
byte b = bytes.get(i);
if (b == '{') {
return XContentType.JSON;
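The byte-level sniffing path now skips a UTF-8 byte order mark before the last-chance scan for '{'. A compact, self-contained sketch of just that path (the other content types are omitted here):

---------------------------------------------
// Sketch of BOM-aware JSON sniffing: skip EF BB BF if present, then look for '{'.
import java.nio.charset.StandardCharsets;

public class ContentTypeSniffSketch {
    static String sniff(byte[] bytes) {
        int start = 0;
        if (bytes.length > 3 && bytes[0] == (byte) 0xEF && bytes[1] == (byte) 0xBB && bytes[2] == (byte) 0xBF) {
            start = 3; // UTF-8 byte order mark
        }
        for (int i = start; i < bytes.length; i++) {
            if (bytes[i] == '{') {
                return "JSON";
            }
        }
        return "unknown";
    }

    public static void main(String[] args) {
        byte[] withBom = new byte[] {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF, '{', '}'};
        System.out.println(sniff(withBom));                                      // JSON
        System.out.println(sniff("{\"a\":1}".getBytes(StandardCharsets.UTF_8))); // JSON
    }
}
---------------------------------------------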
@@ -86,7 +86,7 @@ public class NodeJoinController extends AbstractComponent {
@Override
void onClose() {
if (electionContext.compareAndSet(this, null)) {
stopAccumulatingJoins();
stopAccumulatingJoins("election closed");
} else {
assert false : "failed to remove current election context";
}

@@ -156,7 +156,7 @@ public class NodeJoinController extends AbstractComponent {

/**
* Accumulates any future incoming join request. Pending join requests will be processed in the final steps of becoming a
* master or when {@link #stopAccumulatingJoins()} is called.
* master or when {@link #stopAccumulatingJoins(String)} is called.
*/
public void startAccumulatingJoins() {
logger.trace("starting to accumulate joins");

@@ -166,14 +166,14 @@ public class NodeJoinController extends AbstractComponent {
}

/** Stopped accumulating joins. All pending joins will be processed. Future joins will be processed immediately */
public void stopAccumulatingJoins() {
logger.trace("stopping join accumulation");
public void stopAccumulatingJoins(String reason) {
logger.trace("stopping join accumulation ([{}])", reason);
assert electionContext.get() == null : "stopAccumulatingJoins() called, but there is an ongoing election context";
boolean b = accumulateJoins.getAndSet(false);
assert b : "stopAccumulatingJoins() called but not accumulating";
synchronized (pendingJoinRequests) {
if (pendingJoinRequests.size() > 0) {
processJoins("stopping to accumulate joins");
processJoins("pending joins after accumulation stop [" + reason + "]");
}
}
}

@@ -210,7 +210,7 @@ public class NodeJoinController extends AbstractComponent {
return;
}

int pendingMasterJoins=0;
int pendingMasterJoins = 0;
synchronized (pendingJoinRequests) {
for (DiscoveryNode node : pendingJoinRequests.keySet()) {
if (node.isMasterNode()) {

@@ -219,7 +219,9 @@ public class NodeJoinController extends AbstractComponent {
}
}
if (pendingMasterJoins < context.requiredMasterJoins) {
logger.trace("not enough joins for election. Got [{}], required [{}]", pendingMasterJoins, context.requiredMasterJoins);
if (context.pendingSetAsMasterTask.get() == false) {
logger.trace("not enough joins for election. Got [{}], required [{}]", pendingMasterJoins, context.requiredMasterJoins);
}
return;
}
if (context.pendingSetAsMasterTask.getAndSet(true)) {
@@ -44,7 +44,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.MasterFaultDetection;

@@ -53,6 +52,7 @@ import org.elasticsearch.discovery.zen.membership.MembershipAction;
import org.elasticsearch.discovery.zen.ping.PingContextProvider;
import org.elasticsearch.discovery.zen.ping.ZenPing;
import org.elasticsearch.discovery.zen.ping.ZenPingService;
import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
import org.elasticsearch.discovery.zen.publish.PublishClusterStateAction;
import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.node.settings.NodeSettingsService;

@@ -401,7 +401,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
);
} else {
// process any incoming joins (they will fail because we are not the master)
nodeJoinController.stopAccumulatingJoins();
nodeJoinController.stopAccumulatingJoins("not master");

// send join request
final boolean success = joinElectedMaster(masterNode);
@@ -20,12 +20,7 @@
package org.elasticsearch.gateway;

import com.carrotsearch.hppc.cursors.ObjectCursor;

import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;

@@ -264,6 +259,7 @@ public class GatewayService extends AbstractLifecycleComponent<GatewayService> i
@Override
public void onFailure(String source, Throwable t) {
logger.error("unexpected failure during [{}]", t, source);
GatewayRecoveryListener.this.onFailure("failed to updated cluster state");
}

@Override
@@ -28,6 +28,10 @@ import java.io.IOException;
*/
public class BindHttpException extends HttpException {

public BindHttpException(String message) {
super(message);
}

public BindHttpException(String message, Throwable cause) {
super(message, cause);
}
@@ -189,7 +189,7 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
sitePath = null;
// If a trailing / is missing, we redirect to the right page #2654
String redirectUrl = request.rawPath() + "/";
BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", "<head><meta http-equiv=\"refresh\" content=\"0; URL=" + redirectUrl + "></head>");
BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", "<head><meta http-equiv=\"refresh\" content=\"0; URL=" + redirectUrl + "\"></head>");
restResponse.addHeader("Location", redirectUrl);
channel.sendResponse(restResponse);
return;
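The HttpServer fix adds the closing quote after the URL inside the meta-refresh attribute. A tiny sketch of building that snippet correctly; the sample path is illustrative only:

---------------------------------------------
// The content attribute must close its quote after the URL, or browsers see
// a malformed refresh directive.
public class MetaRefreshSketch {
    static String metaRefresh(String redirectUrl) {
        return "<head><meta http-equiv=\"refresh\" content=\"0; URL=" + redirectUrl + "\"></head>";
    }

    public static void main(String[] args) {
        System.out.println(metaRefresh("/_plugin/head/"));
        // <head><meta http-equiv="refresh" content="0; URL=/_plugin/head/"></head>
    }
}
---------------------------------------------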
@@ -81,6 +81,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer

public static final boolean DEFAULT_SETTING_PIPELINING = true;
public static final int DEFAULT_SETTING_PIPELINING_MAX_EVENTS = 10000;
public static final String DEFAULT_PORT_RANGE = "9200-9300";

protected final NetworkService networkService;
protected final BigArrays bigArrays;

@@ -157,7 +158,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
this.maxCompositeBufferComponents = settings.getAsInt("http.netty.max_composite_buffer_components", -1);
this.workerCount = settings.getAsInt("http.netty.worker_count", EsExecutors.boundedNumberOfProcessors(settings) * 2);
this.blockingServer = settings.getAsBoolean("http.netty.http.blocking_server", settings.getAsBoolean(TCP_BLOCKING_SERVER, settings.getAsBoolean(TCP_BLOCKING, false)));
this.port = settings.get("http.netty.port", settings.get("http.port", "9200-9300"));
this.port = settings.get("http.netty.port", settings.get("http.port", DEFAULT_PORT_RANGE));
this.bindHosts = settings.getAsArray("http.netty.bind_host", settings.getAsArray("http.bind_host", settings.getAsArray("http.host", null)));
this.publishHosts = settings.getAsArray("http.netty.publish_host", settings.getAsArray("http.publish_host", settings.getAsArray("http.host", null)));
this.publishPort = settings.getAsInt("http.netty.publish_port", settings.getAsInt("http.publish_port", 0));

@@ -257,16 +258,28 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
boundAddresses.add(bindAddress(address));
}

InetSocketTransportAddress boundAddress = boundAddresses.get(0);
InetSocketAddress publishAddress;
if (0 == publishPort) {
publishPort = boundAddress.getPort();
}
final InetAddress publishInetAddress;
try {
publishAddress = new InetSocketAddress(networkService.resolvePublishHostAddresses(publishHosts), publishPort);
publishInetAddress = networkService.resolvePublishHostAddresses(publishHosts);
} catch (Exception e) {
throw new BindTransportException("Failed to resolve publish address", e);
}

if (0 == publishPort) {
for (InetSocketTransportAddress boundAddress : boundAddresses) {
InetAddress boundInetAddress = boundAddress.address().getAddress();
if (boundInetAddress.isAnyLocalAddress() || boundInetAddress.equals(publishInetAddress)) {
publishPort = boundAddress.getPort();
break;
}
}
}

if (0 == publishPort) {
throw new BindHttpException("Publish address [" + publishInetAddress + "] does not match any of the bound addresses [" + boundAddresses + "]");
}

final InetSocketAddress publishAddress = new InetSocketAddress(publishInetAddress, publishPort);;
this.boundAddress = new BoundTransportAddress(boundAddresses.toArray(new TransportAddress[boundAddresses.size()]), new InetSocketTransportAddress(publishAddress));
}
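The new publish-port logic only infers a port when none is configured: it takes the port of a bound address that is either a wildcard address or equal to the publish address, and fails otherwise. A self-contained sketch of that selection:

---------------------------------------------
// Sketch of publish-port resolution against a list of bound addresses.
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.List;

public class PublishPortSketch {
    static int resolvePublishPort(int configured, List<InetSocketAddress> bound, InetAddress publish) {
        if (configured != 0) {
            return configured; // explicit setting wins
        }
        for (InetSocketAddress address : bound) {
            InetAddress inet = address.getAddress();
            if (inet.isAnyLocalAddress() || inet.equals(publish)) {
                return address.getPort();
            }
        }
        throw new IllegalStateException("Publish address [" + publish + "] does not match any of the bound addresses " + bound);
    }

    public static void main(String[] args) throws Exception {
        List<InetSocketAddress> bound = Arrays.asList(
                new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9200),
                new InetSocketAddress(InetAddress.getByName("192.168.1.10"), 9201));
        System.out.println(resolvePublishPort(0, bound, InetAddress.getByName("192.168.1.10"))); // 9201
    }
}
---------------------------------------------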
@@ -125,6 +125,18 @@ final class CompositeIndexEventListener implements IndexEventListener {
}
}

@Override
public void onShardActive(IndexShard indexShard) {
for (IndexEventListener listener : listeners) {
try {
listener.onShardActive(indexShard);
} catch (Throwable t) {
logger.warn("[{}] failed to invoke on shard active callback", t, indexShard.shardId().getId());
throw t;
}
}
}

@Override
public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) {
for (IndexEventListener listener : listeners) {
@@ -20,20 +20,14 @@
package org.elasticsearch.index;

import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.engine.InternalEngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexSearcherWrapper;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;

@@ -41,7 +35,7 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;

import java.io.IOException;

@@ -61,7 +55,7 @@ import java.util.function.Consumer;
* <li>Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}</li>
* </ul>
*/
public final class IndexModule extends AbstractModule {
public final class IndexModule {

public static final String STORE_TYPE = "index.store.type";
public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity";

@@ -72,10 +66,9 @@ public final class IndexModule extends AbstractModule {
public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
private final IndexSettings indexSettings;
private final IndexStoreConfig indexStoreConfig;
private final IndicesQueryCache indicesQueryCache;
private final AnalysisRegistry analysisRegistry;
// pkg private so tests can mock
Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;
final SetOnce<EngineFactory> engineFactory = new SetOnce<>();
private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
private final Set<Consumer<Settings>> settingsConsumers = new HashSet<>();
private final Set<IndexEventListener> indexEventListeners = new HashSet<>();

@@ -83,14 +76,11 @@ public final class IndexModule extends AbstractModule {
private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndicesQueryCache, QueryCache>> queryCaches = new HashMap<>();
private IndicesWarmer indicesWarmer;

public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, IndicesQueryCache indicesQueryCache, IndicesWarmer warmer, AnalysisRegistry analysisRegistry) {
public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, AnalysisRegistry analysisRegistry) {
this.indexStoreConfig = indexStoreConfig;
this.indexSettings = indexSettings;
this.indicesQueryCache = indicesQueryCache;
this.indicesWarmer = warmer;
this.analysisRegistry = analysisRegistry;
registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache::new);
registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a));

@@ -220,50 +210,6 @@ public final class IndexModule extends AbstractModule {
return false;
}

@Override
protected void configure() {
try {
bind(AnalysisService.class).toInstance(analysisRegistry.build(indexSettings));
} catch (IOException e) {
throw new ElasticsearchException("can't create analysis service", e);
}
bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton();
bind(IndexSearcherWrapperFactory.class).toInstance(indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get());
bind(IndexEventListener.class).toInstance(freeze());
bind(IndexService.class).asEagerSingleton();
bind(IndexServicesProvider.class).asEagerSingleton();
bind(MapperService.class).asEagerSingleton();
bind(IndexFieldDataService.class).asEagerSingleton();
final IndexSettings settings = new IndexSettings(indexSettings.getIndexMetaData(), indexSettings.getNodeSettings(), settingsConsumers);
bind(IndexSettings.class).toInstance(settings);

final String storeType = settings.getSettings().get(STORE_TYPE);
final IndexStore store;
if (storeType == null || isBuiltinType(storeType)) {
store = new IndexStore(settings, indexStoreConfig);
} else {
BiFunction<IndexSettings, IndexStoreConfig, IndexStore> factory = storeTypes.get(storeType);
if (factory == null) {
throw new IllegalArgumentException("Unknown store type [" + storeType + "]");
}
store = factory.apply(settings, indexStoreConfig);
if (store == null) {
throw new IllegalStateException("store must not be null");
}
}

final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, indicesWarmer);
QueryCache queryCache = queryCacheProvider.apply(settings, indicesQueryCache);
IndexCache indexCache = new IndexCache(settings, queryCache, bitsetFilterCache);
bind(QueryCache.class).toInstance(queryCache);
bind(IndexCache.class).toInstance(indexCache);
bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexStore.class).toInstance(store);
bind(SimilarityService.class).toInstance(new SimilarityService(settings, similarities));
}

public enum Type {
NIOFS,
MMAPFS,

@@ -291,4 +237,29 @@ public final class IndexModule extends AbstractModule {
*/
IndexSearcherWrapper newWrapper(final IndexService indexService);
}

public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter, NodeServicesProvider servicesProvider) throws IOException {
final IndexSettings settings = indexSettings.newWithListener(settingsConsumers);
IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get();
|
||||
IndexEventListener eventListener = freeze();
|
||||
final String storeType = settings.getSettings().get(STORE_TYPE);
|
||||
final IndexStore store;
|
||||
if (storeType == null || isBuiltinType(storeType)) {
|
||||
store = new IndexStore(settings, indexStoreConfig);
|
||||
} else {
|
||||
BiFunction<IndexSettings, IndexStoreConfig, IndexStore> factory = storeTypes.get(storeType);
|
||||
if (factory == null) {
|
||||
throw new IllegalArgumentException("Unknown store type [" + storeType + "]");
|
||||
}
|
||||
store = factory.apply(settings, indexStoreConfig);
|
||||
if (store == null) {
|
||||
throw new IllegalStateException("store must not be null");
|
||||
}
|
||||
}
|
||||
final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
|
||||
final BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
|
||||
final QueryCache queryCache = queryCacheProvider.apply(settings, servicesProvider.getIndicesQueryCache());
|
||||
return new IndexService(settings, environment, new SimilarityService(settings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(),
|
||||
servicesProvider, queryCache, store, eventListener, searcherWrapperFactory);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,16 +35,19 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.env.ShardLock;
|
||||
import org.elasticsearch.index.analysis.AnalysisRegistry;
|
||||
import org.elasticsearch.index.analysis.AnalysisService;
|
||||
import org.elasticsearch.index.cache.IndexCache;
|
||||
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
|
||||
import org.elasticsearch.index.cache.query.QueryCache;
|
||||
import org.elasticsearch.index.engine.EngineFactory;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.*;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.index.store.IndexStore;
|
||||
|
@ -65,17 +68,21 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
|
|||
/**
|
||||
*
|
||||
*/
|
||||
public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
|
||||
public final class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard>{
|
||||
|
||||
private final IndexEventListener eventListener;
|
||||
private final AnalysisService analysisService;
|
||||
private final IndexFieldDataService indexFieldData;
|
||||
private final BitsetFilterCache bitsetFilterCache;
|
||||
private final NodeEnvironment nodeEnv;
|
||||
private final IndicesService indicesServices;
|
||||
private final IndexServicesProvider indexServicesProvider;
|
||||
private final ShardStoreDeleter shardStoreDeleter;
|
||||
private final NodeServicesProvider nodeServicesProvider;
|
||||
private final IndexStore indexStore;
|
||||
private final IndexSearcherWrapper searcherWrapper;
|
||||
private final IndexCache indexCache;
|
||||
private final MapperService mapperService;
|
||||
private final SimilarityService similarityService;
|
||||
private final EngineFactory engineFactory;
|
||||
private volatile Map<Integer, IndexShard> shards = emptyMap();
|
||||
private final AtomicBoolean closed = new AtomicBoolean(false);
|
||||
private final AtomicBoolean deleted = new AtomicBoolean(false);
|
||||
|
@ -83,26 +90,31 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
|
||||
@Inject
|
||||
public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
|
||||
AnalysisService analysisService,
|
||||
IndexFieldDataService indexFieldData,
|
||||
BitsetFilterCache bitSetFilterCache,
|
||||
IndicesService indicesServices,
|
||||
IndexServicesProvider indexServicesProvider,
|
||||
SimilarityService similarityService,
|
||||
ShardStoreDeleter shardStoreDeleter,
|
||||
AnalysisRegistry registry,
|
||||
@Nullable EngineFactory engineFactory,
|
||||
NodeServicesProvider nodeServicesProvider,
|
||||
QueryCache queryCache,
|
||||
IndexStore indexStore,
|
||||
IndexEventListener eventListener,
|
||||
IndexModule.IndexSearcherWrapperFactory wrapperFactory) {
|
||||
IndexModule.IndexSearcherWrapperFactory wrapperFactory) throws IOException {
|
||||
super(indexSettings);
|
||||
this.indexSettings = indexSettings;
|
||||
this.analysisService = analysisService;
|
||||
this.indexFieldData = indexFieldData;
|
||||
this.bitsetFilterCache = bitSetFilterCache;
|
||||
this.indicesServices = indicesServices;
|
||||
this.analysisService = registry.build(indexSettings);
|
||||
this.similarityService = similarityService;
|
||||
this.mapperService = new MapperService(indexSettings, analysisService, similarityService);
|
||||
this.indexFieldData = new IndexFieldDataService(indexSettings, nodeServicesProvider.getIndicesFieldDataCache(), nodeServicesProvider.getCircuitBreakerService(), mapperService);
|
||||
this.shardStoreDeleter = shardStoreDeleter;
|
||||
this.eventListener = eventListener;
|
||||
this.nodeEnv = nodeEnv;
|
||||
this.indexServicesProvider = indexServicesProvider;
|
||||
this.nodeServicesProvider = nodeServicesProvider;
|
||||
this.indexStore = indexStore;
|
||||
indexFieldData.setListener(new FieldDataCacheListener(this));
|
||||
bitSetFilterCache.setListener(new BitsetCacheListener(this));
|
||||
this.bitsetFilterCache = new BitsetFilterCache(indexSettings, nodeServicesProvider.getWarmer(), new BitsetCacheListener(this));
|
||||
this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache);
|
||||
this.engineFactory = engineFactory;
|
||||
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
|
||||
this.searcherWrapper = wrapperFactory.newWrapper(this);
|
||||
}
|
||||
|
||||
|
@ -145,43 +157,37 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
public Set<Integer> shardIds() { return shards.keySet(); }
|
||||
|
||||
public IndexCache cache() {
|
||||
return indexServicesProvider.getIndexCache();
|
||||
return indexCache;
|
||||
}
|
||||
|
||||
public IndexFieldDataService fieldData() {
|
||||
return indexFieldData;
|
||||
}
|
||||
|
||||
public BitsetFilterCache bitsetFilterCache() {
|
||||
return bitsetFilterCache;
|
||||
}
|
||||
public IndexFieldDataService fieldData() { return indexFieldData; }
|
||||
|
||||
public AnalysisService analysisService() {
|
||||
return this.analysisService;
|
||||
}
|
||||
|
||||
public MapperService mapperService() {
|
||||
return indexServicesProvider.getMapperService();
|
||||
}
|
||||
|
||||
public IndexQueryParserService queryParserService() {
|
||||
return indexServicesProvider.getQueryParserService();
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public SimilarityService similarityService() {
|
||||
return indexServicesProvider.getSimilarityService();
|
||||
return similarityService;
|
||||
}
|
||||
|
||||
public synchronized void close(final String reason, boolean delete) {
|
||||
public synchronized void close(final String reason, boolean delete) throws IOException {
|
||||
if (closed.compareAndSet(false, true)) {
|
||||
deleted.compareAndSet(false, delete);
|
||||
final Set<Integer> shardIds = shardIds();
|
||||
for (final int shardId : shardIds) {
|
||||
try {
|
||||
removeShard(shardId, reason);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("failed to close shard", t);
|
||||
try {
|
||||
final Set<Integer> shardIds = shardIds();
|
||||
for (final int shardId : shardIds) {
|
||||
try {
|
||||
removeShard(shardId, reason);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("failed to close shard", t);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -266,11 +272,11 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
|
||||
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
|
||||
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
|
||||
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId)));
|
||||
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> nodeServicesProvider.getIndicesQueryCache().onClose(shardId)));
|
||||
if (useShadowEngine(primary, indexSettings)) {
|
||||
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
|
||||
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider);
|
||||
} else {
|
||||
indexShard = new IndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
|
||||
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider);
|
||||
}
|
||||
|
||||
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
|
||||
|
@ -343,25 +349,29 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
try {
|
||||
eventListener.beforeIndexShardDeleted(lock.getShardId(), indexSettings);
|
||||
} finally {
|
||||
indicesServices.deleteShardStore("delete index", lock, indexSettings);
|
||||
shardStoreDeleter.deleteShardStore("delete index", lock, indexSettings);
|
||||
eventListener.afterIndexShardDeleted(lock.getShardId(), indexSettings);
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
indicesServices.addPendingDelete(lock.getShardId(), indexSettings);
|
||||
shardStoreDeleter.addPendingDelete(lock.getShardId(), indexSettings);
|
||||
logger.debug("[{}] failed to delete shard content - scheduled a retry", e, lock.getShardId().id());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public IndexServicesProvider getIndexServices() {
|
||||
return indexServicesProvider;
|
||||
public NodeServicesProvider getIndexServices() {
|
||||
return nodeServicesProvider;
|
||||
}
|
||||
|
||||
public IndexSettings getIndexSettings() {
|
||||
return indexSettings;
|
||||
}
|
||||
|
||||
public QueryShardContext getQueryShardContext() {
|
||||
return new QueryShardContext(indexSettings, nodeServicesProvider.getClient(), indexCache.bitsetFilterCache(), indexFieldData, mapperService(), similarityService(), nodeServicesProvider.getScriptService(), nodeServicesProvider.getIndicesQueriesRegistry());
|
||||
}
|
||||
|
||||
private class StoreCloseListener implements Store.OnClose {
|
||||
private final ShardId shardId;
|
||||
private final boolean ownsShard;
|
||||
|
@ -452,11 +462,10 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
* The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
|
||||
* Returns <tt>null</tt> if no filtering is required.</p>
|
||||
*/
|
||||
public Query aliasFilter(String... aliasNames) {
|
||||
public Query aliasFilter(QueryShardContext context, String... aliasNames) {
|
||||
if (aliasNames == null || aliasNames.length == 0) {
|
||||
return null;
|
||||
}
|
||||
final IndexQueryParserService indexQueryParser = queryParserService();
|
||||
final ImmutableOpenMap<String, AliasMetaData> aliases = indexSettings.getIndexMetaData().getAliases();
|
||||
if (aliasNames.length == 1) {
|
||||
AliasMetaData alias = aliases.get(aliasNames[0]);
|
||||
|
@ -464,7 +473,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
// This shouldn't happen unless alias disappeared after filteringAliases was called.
|
||||
throw new InvalidAliasNameException(index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
}
|
||||
return parse(alias, indexQueryParser);
|
||||
return parse(alias, context);
|
||||
} else {
|
||||
// we need to bench here a bit, to see maybe it makes sense to use OrFilter
|
||||
BooleanQuery.Builder combined = new BooleanQuery.Builder();
|
||||
|
@ -472,9 +481,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
AliasMetaData alias = aliases.get(aliasName);
|
||||
if (alias == null) {
|
||||
// This shouldn't happen unless alias disappeared after filteringAliases was called.
|
||||
throw new InvalidAliasNameException(indexQueryParser.index(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
throw new InvalidAliasNameException(indexSettings.getIndex(), aliasNames[0], "Unknown alias name was passed to alias Filter");
|
||||
}
|
||||
Query parsedFilter = parse(alias, indexQueryParser);
|
||||
Query parsedFilter = parse(alias, context);
|
||||
if (parsedFilter != null) {
|
||||
combined.add(parsedFilter, BooleanClause.Occur.SHOULD);
|
||||
} else {
|
||||
|
@ -486,18 +495,18 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
}
|
||||
}
|
||||
|
||||
private Query parse(AliasMetaData alias, IndexQueryParserService indexQueryParser) {
|
||||
private Query parse(AliasMetaData alias, QueryShardContext parseContext) {
|
||||
if (alias.filter() == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
byte[] filterSource = alias.filter().uncompressed();
|
||||
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
|
||||
ParsedQuery parsedFilter = indexQueryParser.parseInnerFilter(parser);
|
||||
ParsedQuery parsedFilter = parseContext.parseInnerFilter(parser);
|
||||
return parsedFilter == null ? null : parsedFilter.query();
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
throw new AliasFilterParsingException(indexQueryParser.index(), alias.getAlias(), "Invalid alias filter", ex);
|
||||
throw new AliasFilterParsingException(parseContext.index(), alias.getAlias(), "Invalid alias filter", ex);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -522,4 +531,23 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
public interface ShardStoreDeleter {
|
||||
void deleteShardStore(String reason, ShardLock lock, Settings indexSettings) throws IOException;
|
||||
void addPendingDelete(ShardId shardId, Settings indexSettings);
|
||||
}
|
||||
|
||||
final EngineFactory getEngineFactory() {
|
||||
return engineFactory;
|
||||
} // pkg private for testing
|
||||
|
||||
final IndexSearcherWrapper getSearcherWrapper() {
|
||||
return searcherWrapper;
|
||||
} // pkg private for testing
|
||||
|
||||
final IndexStore getIndexStore() {
|
||||
return indexStore;
|
||||
} // pkg private for testing
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -1,132 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index;
|
||||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.index.cache.IndexCache;
|
||||
import org.elasticsearch.index.codec.CodecService;
|
||||
import org.elasticsearch.index.engine.EngineFactory;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexSearcherWrapper;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.indices.IndicesWarmer;
|
||||
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
|
||||
import org.elasticsearch.indices.memory.IndexingMemoryController;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
/**
|
||||
* Simple provider class that holds the Index and Node level services used by
|
||||
* a shard.
|
||||
* This is just a temporary solution until we cleaned up index creation and removed injectors on that level as well.
|
||||
*/
|
||||
public final class IndexServicesProvider {
|
||||
|
||||
private final ThreadPool threadPool;
|
||||
private final MapperService mapperService;
|
||||
private final IndexQueryParserService queryParserService;
|
||||
private final IndexCache indexCache;
|
||||
private final IndicesQueryCache indicesQueryCache;
|
||||
private final CodecService codecService;
|
||||
private final TermVectorsService termVectorsService;
|
||||
private final IndexFieldDataService indexFieldDataService;
|
||||
private final IndicesWarmer warmer;
|
||||
private final SimilarityService similarityService;
|
||||
private final EngineFactory factory;
|
||||
private final BigArrays bigArrays;
|
||||
private final IndexingMemoryController indexingMemoryController;
|
||||
private final IndexEventListener listener;
|
||||
|
||||
@Inject
|
||||
public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController) {
|
||||
this.listener = listener;
|
||||
this.threadPool = threadPool;
|
||||
this.mapperService = mapperService;
|
||||
this.queryParserService = queryParserService;
|
||||
this.indexCache = indexCache;
|
||||
this.indicesQueryCache = indicesQueryCache;
|
||||
this.codecService = codecService;
|
||||
this.termVectorsService = termVectorsService;
|
||||
this.indexFieldDataService = indexFieldDataService;
|
||||
this.warmer = warmer;
|
||||
this.similarityService = similarityService;
|
||||
this.factory = factory;
|
||||
this.bigArrays = bigArrays;
|
||||
this.indexingMemoryController = indexingMemoryController;
|
||||
}
|
||||
|
||||
public IndexEventListener getIndexEventListener() {
|
||||
return listener;
|
||||
}
|
||||
public ThreadPool getThreadPool() {
|
||||
return threadPool;
|
||||
}
|
||||
|
||||
public MapperService getMapperService() {
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public IndexQueryParserService getQueryParserService() {
|
||||
return queryParserService;
|
||||
}
|
||||
|
||||
public IndexCache getIndexCache() {
|
||||
return indexCache;
|
||||
}
|
||||
|
||||
public IndicesQueryCache getIndicesQueryCache() {
|
||||
return indicesQueryCache;
|
||||
}
|
||||
|
||||
public CodecService getCodecService() {
|
||||
return codecService;
|
||||
}
|
||||
|
||||
public TermVectorsService getTermVectorsService() {
|
||||
return termVectorsService;
|
||||
}
|
||||
|
||||
public IndexFieldDataService getIndexFieldDataService() {
|
||||
return indexFieldDataService;
|
||||
}
|
||||
|
||||
public IndicesWarmer getWarmer() {
|
||||
return warmer;
|
||||
}
|
||||
|
||||
public SimilarityService getSimilarityService() {
|
||||
return similarityService;
|
||||
}
|
||||
|
||||
public EngineFactory getFactory() {
|
||||
return factory;
|
||||
}
|
||||
|
||||
public BigArrays getBigArrays() { return bigArrays; }
|
||||
|
||||
public IndexingMemoryController getIndexingMemoryController() {
|
||||
return indexingMemoryController;
|
||||
}
|
||||
}
|
|
@ -20,16 +20,20 @@ package org.elasticsearch.index;
|
|||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.NodeEnvironment;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
* This class encapsulates all index level settings and handles settings updates.
|
||||
|
@ -39,6 +43,12 @@ import java.util.function.Consumer;
|
|||
* be called for each settings update.
|
||||
*/
|
||||
public final class IndexSettings {
|
||||
|
||||
public static final String DEFAULT_FIELD = "index.query.default_field";
|
||||
public static final String QUERY_STRING_LENIENT = "index.query_string.lenient";
|
||||
public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard";
|
||||
public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard";
|
||||
public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields";
|
||||
private final String uuid;
|
||||
private final List<Consumer<Settings>> updateListeners;
|
||||
private final Index index;
|
||||
|
@ -48,10 +58,51 @@ public final class IndexSettings {
|
|||
private final Settings nodeSettings;
|
||||
private final int numberOfShards;
|
||||
private final boolean isShadowReplicaIndex;
|
||||
private final ParseFieldMatcher parseFieldMatcher;
|
||||
// volatile fields are updated via #updateIndexMetaData(IndexMetaData) under lock
|
||||
private volatile Settings settings;
|
||||
private volatile IndexMetaData indexMetaData;
|
||||
private final String defaultField;
|
||||
private final boolean queryStringLenient;
|
||||
private final boolean queryStringAnalyzeWildcard;
|
||||
private final boolean queryStringAllowLeadingWildcard;
|
||||
private final boolean defaultAllowUnmappedFields;
|
||||
private final Predicate<String> indexNameMatcher;
|
||||
|
||||
/**
|
||||
* Returns the default search field for this index.
|
||||
*/
|
||||
public String getDefaultField() {
|
||||
return defaultField;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if query string parsing should be lenient. The default is <code>false</code>
|
||||
*/
|
||||
public boolean isQueryStringLenient() {
|
||||
return queryStringLenient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if the query string should analyze wildcards. The default is <code>false</code>
|
||||
*/
|
||||
public boolean isQueryStringAnalyzeWildcard() {
|
||||
return queryStringAnalyzeWildcard;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if the query string parser should allow leading wildcards. The default is <code>true</code>
|
||||
*/
|
||||
public boolean isQueryStringAllowLeadingWildcard() {
|
||||
return queryStringAllowLeadingWildcard;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if queries should be lenient about unmapped fields. The default is <code>true</code>
|
||||
*/
|
||||
public boolean isDefaultAllowUnmappedFields() {
|
||||
return defaultAllowUnmappedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
|
||||
|
@ -62,6 +113,19 @@ public final class IndexSettings {
|
|||
* @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
|
||||
*/
|
||||
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners) {
|
||||
this(indexMetaData, nodeSettings, updateListeners, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance. The given node settings will be merged with the settings in the metadata
|
||||
* while index level settings will overwrite node settings.
|
||||
*
|
||||
* @param indexMetaData the index metadata this settings object is associated with
|
||||
* @param nodeSettings the nodes settings this index is allocated on.
|
||||
* @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated
|
||||
* @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias
|
||||
*/
|
||||
public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection<Consumer<Settings>> updateListeners, final Predicate<String> indexNameMatcher) {
|
||||
this.nodeSettings = nodeSettings;
|
||||
this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build();
|
||||
this.updateListeners = Collections.unmodifiableList(new ArrayList<>(updateListeners));
|
||||
|
@ -73,6 +137,25 @@ public final class IndexSettings {
|
|||
this.indexMetaData = indexMetaData;
|
||||
numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null);
|
||||
isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings);
|
||||
|
||||
this.defaultField = settings.get(DEFAULT_FIELD, AllFieldMapper.NAME);
|
||||
this.queryStringLenient = settings.getAsBoolean(QUERY_STRING_LENIENT, false);
|
||||
this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false);
|
||||
this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true);
|
||||
this.parseFieldMatcher = new ParseFieldMatcher(settings);
|
||||
this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true);
|
||||
this.indexNameMatcher = indexNameMatcher;
|
||||
assert indexNameMatcher.test(indexMetaData.getIndex());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new {@link IndexSettings} instance adding the given listeners to the settings
|
||||
*/
|
||||
IndexSettings newWithListener(final Collection<Consumer<Settings>> updateListeners) {
|
||||
ArrayList<Consumer<Settings>> newUpdateListeners = new ArrayList<>(updateListeners);
|
||||
newUpdateListeners.addAll(this.updateListeners);
|
||||
return new IndexSettings(indexMetaData, nodeSettings, newUpdateListeners, indexNameMatcher);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -138,9 +221,7 @@ public final class IndexSettings {
|
|||
* Returns <code>true</code> iff this index uses shadow replicas.
|
||||
* @see IndexMetaData#isIndexUsingShadowReplicas(Settings)
|
||||
*/
|
||||
public boolean isShadowReplicaIndex() {
|
||||
return isShadowReplicaIndex;
|
||||
}
|
||||
public boolean isShadowReplicaIndex() { return isShadowReplicaIndex; }
|
||||
|
||||
/**
|
||||
* Returns the node settings. The settings retured from {@link #getSettings()} are a merged version of the
|
||||
|
@ -150,6 +231,18 @@ public final class IndexSettings {
|
|||
return nodeSettings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link ParseFieldMatcher} for this index.
|
||||
*/
|
||||
public ParseFieldMatcher getParseFieldMatcher() { return parseFieldMatcher; }
|
||||
|
||||
/**
|
||||
* Returns <code>true</code> if the given expression matches the index name or one of it's aliases
|
||||
*/
|
||||
public boolean matchesIndexName(String expression) {
|
||||
return indexNameMatcher.test(expression);
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the settings and index metadata and notifies all registered settings consumers with the new settings iff at least one setting has changed.
|
||||
*
|
||||
|
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
import org.elasticsearch.index.termvectors.TermVectorsService;
|
||||
import org.elasticsearch.indices.IndicesWarmer;
|
||||
import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
|
||||
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
|
||||
import org.elasticsearch.indices.memory.IndexingMemoryController;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
/**
|
||||
* Simple provider class that holds the Index and Node level services used by
|
||||
* a shard.
|
||||
* This is just a temporary solution until we cleaned up index creation and removed injectors on that level as well.
|
||||
*/
|
||||
public final class NodeServicesProvider {
|
||||
|
||||
private final ThreadPool threadPool;
|
||||
private final IndicesQueryCache indicesQueryCache;
|
||||
private final TermVectorsService termVectorsService;
|
||||
private final IndicesWarmer warmer;
|
||||
private final BigArrays bigArrays;
|
||||
private final Client client;
|
||||
private final IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
private final ScriptService scriptService;
|
||||
private final IndicesFieldDataCache indicesFieldDataCache;
|
||||
private final CircuitBreakerService circuitBreakerService;
|
||||
|
||||
@Inject
|
||||
public NodeServicesProvider(ThreadPool threadPool, IndicesQueryCache indicesQueryCache, TermVectorsService termVectorsService, @Nullable IndicesWarmer warmer, BigArrays bigArrays, Client client, ScriptService scriptService, IndicesQueriesRegistry indicesQueriesRegistry, IndicesFieldDataCache indicesFieldDataCache, CircuitBreakerService circuitBreakerService) {
|
||||
this.threadPool = threadPool;
|
||||
this.indicesQueryCache = indicesQueryCache;
|
||||
this.termVectorsService = termVectorsService;
|
||||
this.warmer = warmer;
|
||||
this.bigArrays = bigArrays;
|
||||
this.client = client;
|
||||
this.indicesQueriesRegistry = indicesQueriesRegistry;
|
||||
this.scriptService = scriptService;
|
||||
this.indicesFieldDataCache = indicesFieldDataCache;
|
||||
this.circuitBreakerService = circuitBreakerService;
|
||||
}
|
||||
|
||||
public ThreadPool getThreadPool() {
|
||||
return threadPool;
|
||||
}
|
||||
|
||||
public IndicesQueryCache getIndicesQueryCache() {
|
||||
return indicesQueryCache;
|
||||
}
|
||||
|
||||
public TermVectorsService getTermVectorsService() {
|
||||
return termVectorsService;
|
||||
}
|
||||
|
||||
public IndicesWarmer getWarmer() {
|
||||
return warmer;
|
||||
}
|
||||
|
||||
public BigArrays getBigArrays() { return bigArrays; }
|
||||
|
||||
public Client getClient() {
|
||||
return client;
|
||||
}
|
||||
|
||||
public IndicesQueriesRegistry getIndicesQueriesRegistry() {
|
||||
return indicesQueriesRegistry;
|
||||
}
|
||||
|
||||
public ScriptService getScriptService() {
|
||||
return scriptService;
|
||||
}
|
||||
|
||||
public IndicesFieldDataCache getIndicesFieldDataCache() {
|
||||
return indicesFieldDataCache;
|
||||
}
|
||||
|
||||
public CircuitBreakerService getCircuitBreakerService() {
|
||||
return circuitBreakerService;
|
||||
}
|
||||
}
|
|
@ -69,44 +69,26 @@ import java.util.concurrent.Executor;
|
|||
public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
|
||||
|
||||
public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";
|
||||
private static final Listener DEFAULT_NOOP_LISTENER = new Listener() {
|
||||
@Override
|
||||
public void onCache(ShardId shardId, Accountable accountable) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRemoval(ShardId shardId, Accountable accountable) {
|
||||
}
|
||||
};
|
||||
|
||||
private final boolean loadRandomAccessFiltersEagerly;
|
||||
private final Cache<Object, Cache<Query, Value>> loadedFilters;
|
||||
private volatile Listener listener = DEFAULT_NOOP_LISTENER;
|
||||
private final Listener listener;
|
||||
private final BitSetProducerWarmer warmer;
|
||||
private final IndicesWarmer indicesWarmer;
|
||||
|
||||
public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer) {
|
||||
public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer, Listener listener) {
|
||||
super(indexSettings);
|
||||
if (listener == null) {
|
||||
throw new IllegalArgumentException("listener must not be null");
|
||||
}
|
||||
this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
|
||||
this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build();
|
||||
this.warmer = new BitSetProducerWarmer();
|
||||
this.indicesWarmer = indicesWarmer;
|
||||
indicesWarmer.addListener(warmer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a listener that is invoked for all subsequent cache and removal events.
|
||||
* @throws IllegalStateException if the listener is set more than once
|
||||
*/
|
||||
public void setListener(Listener listener) {
|
||||
if (listener == null) {
|
||||
throw new IllegalArgumentException("listener must not be null");
|
||||
}
|
||||
if (this.listener != DEFAULT_NOOP_LISTENER) {
|
||||
throw new IllegalStateException("can't set listener more than once");
|
||||
}
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
|
||||
|
||||
public BitSetProducer getBitSetProducer(Query query) {
|
||||
|
|
|
@ -22,15 +22,10 @@ package org.elasticsearch.index.codec;
|
|||
import org.apache.lucene.codecs.Codec;
|
||||
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
|
||||
import org.apache.lucene.codecs.lucene54.Lucene54Codec;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
|
@ -38,11 +33,9 @@ import java.util.Map;
|
|||
* codec layer that allows to use use-case specific file formats &
|
||||
* data-structures per field. Elasticsearch exposes the full
|
||||
* {@link Codec} capabilities through this {@link CodecService}.
|
||||
*
|
||||
*/
|
||||
public class CodecService extends AbstractIndexComponent {
|
||||
public class CodecService {
|
||||
|
||||
private final MapperService mapperService;
|
||||
private final Map<String, Codec> codecs;
|
||||
|
||||
public final static String DEFAULT_CODEC = "default";
|
||||
|
@ -50,11 +43,8 @@ public class CodecService extends AbstractIndexComponent {
|
|||
/** the raw unfiltered lucene default. useful for testing */
|
||||
public final static String LUCENE_DEFAULT_CODEC = "lucene_default";
|
||||
|
||||
@Inject
|
||||
public CodecService(IndexSettings indexSettings, MapperService mapperService) {
|
||||
super(indexSettings);
|
||||
this.mapperService = mapperService;
|
||||
MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
|
||||
public CodecService(@Nullable MapperService mapperService, ESLogger logger) {
|
||||
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
|
||||
if (mapperService == null) {
|
||||
codecs.put(DEFAULT_CODEC, new Lucene54Codec());
|
||||
codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION));
|
||||
|
@ -71,10 +61,6 @@ public class CodecService extends AbstractIndexComponent {
|
|||
this.codecs = codecs.immutableMap();
|
||||
}
|
||||
|
||||
public MapperService mapperService() {
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
public Codec codec(String name) {
|
||||
Codec codec = codecs.get(name);
|
||||
if (codec == null) {
|
||||
|
|
|
@ -58,10 +58,7 @@ public class PerFieldMappingPostingFormatCodec extends Lucene54Codec {
|
|||
if (indexName == null) {
|
||||
logger.warn("no index mapper found for field: [{}] returning default postings format", field);
|
||||
} else if (indexName instanceof CompletionFieldMapper.CompletionFieldType) {
|
||||
// CompletionFieldMapper needs a special postings format
|
||||
final CompletionFieldMapper.CompletionFieldType fieldType = (CompletionFieldMapper.CompletionFieldType) indexName;
|
||||
final PostingsFormat defaultFormat = super.getPostingsFormatForField(field);
|
||||
return fieldType.postingsFormat(defaultFormat);
|
||||
return CompletionFieldMapper.CompletionFieldType.postingsFormat();
|
||||
}
|
||||
return super.getPostingsFormatForField(field);
|
||||
}
|
||||
|
|
|
@ -23,7 +23,6 @@ import org.apache.lucene.util.Accountable;
|
|||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
|
||||
|
@ -44,6 +43,8 @@ import org.elasticsearch.index.shard.ShardId;
|
|||
import org.elasticsearch.indices.breaker.CircuitBreakerService;
|
||||
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
|
@ -54,7 +55,7 @@ import static java.util.Collections.unmodifiableMap;
|
|||
|
||||
/**
|
||||
*/
|
||||
public class IndexFieldDataService extends AbstractIndexComponent {
|
||||
public class IndexFieldDataService extends AbstractIndexComponent implements Closeable {
|
||||
|
||||
public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache";
|
||||
public static final String FIELDDATA_CACHE_VALUE_NODE = "node";
|
||||
|
@ -157,7 +158,6 @@ public class IndexFieldDataService extends AbstractIndexComponent {
|
|||
private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;
|
||||
|
||||
|
||||
@Inject
|
||||
public IndexFieldDataService(IndexSettings indexSettings, IndicesFieldDataCache indicesFieldDataCache,
|
||||
CircuitBreakerService circuitBreakerService, MapperService mapperService) {
|
||||
super(indexSettings);
|
||||
|
@ -260,4 +260,8 @@ public class IndexFieldDataService extends AbstractIndexComponent {
|
|||
this.listener = listener;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
clear();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,9 +21,7 @@ package org.elasticsearch.index.mapper;
|
|||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.mapper.core.*;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.internal.*;
|
||||
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.mapper.object.RootObjectMapper;
|
||||
|
@ -92,10 +90,6 @@ public final class MapperBuilders {
|
|||
return new DoubleFieldMapper.Builder(name);
|
||||
}
|
||||
|
||||
public static GeoPointFieldMapper.Builder geoPointField(String name) {
|
||||
return new GeoPointFieldMapper.Builder(name);
|
||||
}
|
||||
|
||||
public static GeoShapeFieldMapper.Builder geoShapeField(String name) {
|
||||
return new GeoShapeFieldMapper.Builder(name);
|
||||
}
|
||||
|
|
|
@ -33,7 +33,6 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.util.concurrent.ReleasableLock;
|
||||
|
@ -105,7 +104,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
|
||||
private volatile Set<String> parentTypes = emptySet();
|
||||
|
||||
@Inject
|
||||
public MapperService(IndexSettings indexSettings, AnalysisService analysisService,
|
||||
SimilarityService similarityService) {
|
||||
super(indexSettings);
|
||||
|
|
|
@ -18,144 +18,91 @@
|
|||
*/
|
||||
package org.elasticsearch.index.mapper.core;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.codecs.PostingsFormat;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.search.suggest.document.Completion50PostingsFormat;
|
||||
import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
|
||||
import org.apache.lucene.search.suggest.document.CompletionQuery;
|
||||
import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
|
||||
import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
|
||||
import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
|
||||
import org.apache.lucene.search.suggest.document.SuggestField;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.util.set.Sets;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.NumberType;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperException;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
|
||||
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat;
|
||||
import org.elasticsearch.search.suggest.completion.CompletionTokenStream;
|
||||
import org.elasticsearch.search.suggest.context.ContextBuilder;
|
||||
import org.elasticsearch.search.suggest.context.ContextMapping;
|
||||
import org.elasticsearch.search.suggest.context.ContextMapping.ContextConfig;
|
||||
import org.elasticsearch.index.mapper.*;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.*;
|
||||
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.completionField;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
|
||||
|
||||
/**
|
||||
* Mapper for completion field. The field values are indexed as a weighted FST for
|
||||
* fast auto-completion/search-as-you-type functionality.<br>
|
||||
*
|
||||
* Type properties:<br>
|
||||
* <ul>
|
||||
* <li>"analyzer": "simple", (default)</li>
|
||||
* <li>"search_analyzer": "simple", (default)</li>
|
||||
* <li>"preserve_separators" : true, (default)</li>
|
||||
* <li>"preserve_position_increments" : true (default)</li>
|
||||
* <li>"min_input_length": 50 (default)</li>
|
||||
* <li>"contexts" : CONTEXTS</li>
|
||||
* </ul>
|
||||
* see {@link ContextMappings#load(Object, Version)} for CONTEXTS<br>
|
||||
* see {@link #parse(ParseContext)} for acceptable inputs for indexing<br>
|
||||
* <p>
|
||||
* This field type constructs completion queries that are run
|
||||
* against the weighted FST index by the {@link CompletionSuggester}.
|
||||
* This field can also be extended to add search criteria to suggestions
|
||||
* for query-time filtering and boosting (see {@link ContextMappings}
|
||||
*/
|
||||
public class CompletionFieldMapper extends FieldMapper {
|
||||
public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapperParser {
|
||||
|
||||
public static final String CONTENT_TYPE = "completion";
|
||||
|
||||
public static class Defaults {
|
||||
public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType();
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new CompletionFieldType();
|
||||
static {
|
||||
FIELD_TYPE.setOmitNorms(true);
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
|
||||
public static final boolean DEFAULT_PRESERVE_SEPARATORS = true;
|
||||
public static final boolean DEFAULT_POSITION_INCREMENTS = true;
|
||||
public static final boolean DEFAULT_HAS_PAYLOADS = false;
|
||||
public static final int DEFAULT_MAX_INPUT_LENGTH = 50;
|
||||
}
|
||||
|
||||
public static class Fields {
|
||||
// Mapping field names
|
||||
public static final String ANALYZER = "analyzer";
|
||||
public static final ParseField ANALYZER = new ParseField("analyzer");
|
||||
public static final ParseField SEARCH_ANALYZER = new ParseField("search_analyzer");
|
||||
public static final ParseField PRESERVE_SEPARATORS = new ParseField("preserve_separators");
|
||||
public static final ParseField PRESERVE_POSITION_INCREMENTS = new ParseField("preserve_position_increments");
|
||||
public static final String PAYLOADS = "payloads";
|
||||
public static final String TYPE = "type";
|
||||
public static final ParseField TYPE = new ParseField("type");
|
||||
public static final ParseField CONTEXTS = new ParseField("contexts");
|
||||
public static final ParseField MAX_INPUT_LENGTH = new ParseField("max_input_length", "max_input_len");
|
||||
// Content field names
|
||||
public static final String CONTENT_FIELD_NAME_INPUT = "input";
|
||||
public static final String CONTENT_FIELD_NAME_OUTPUT = "output";
|
||||
public static final String CONTENT_FIELD_NAME_PAYLOAD = "payload";
|
||||
public static final String CONTENT_FIELD_NAME_WEIGHT = "weight";
|
||||
public static final String CONTEXT = "context";
|
||||
public static final String CONTENT_FIELD_NAME_CONTEXTS = "contexts";
|
||||
}
|
||||
|
||||
public static final Set<String> ALLOWED_CONTENT_FIELD_NAMES = Sets.newHashSet(Fields.CONTENT_FIELD_NAME_INPUT,
|
||||
Fields.CONTENT_FIELD_NAME_OUTPUT, Fields.CONTENT_FIELD_NAME_PAYLOAD, Fields.CONTENT_FIELD_NAME_WEIGHT, Fields.CONTEXT);
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder, CompletionFieldMapper> {
|
||||
|
||||
private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS;
|
||||
private boolean payloads = Defaults.DEFAULT_HAS_PAYLOADS;
|
||||
private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS;
|
||||
private int maxInputLength = Defaults.DEFAULT_MAX_INPUT_LENGTH;
|
||||
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE);
|
||||
builder = this;
|
||||
}
|
||||
|
||||
public Builder payloads(boolean payloads) {
|
||||
this.payloads = payloads;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder preserveSeparators(boolean preserveSeparators) {
|
||||
this.preserveSeparators = preserveSeparators;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder preservePositionIncrements(boolean preservePositionIncrements) {
|
||||
this.preservePositionIncrements = preservePositionIncrements;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder maxInputLength(int maxInputLength) {
|
||||
if (maxInputLength <= 0) {
|
||||
throw new IllegalArgumentException(Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]");
|
||||
}
|
||||
this.maxInputLength = maxInputLength;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder contextMapping(SortedMap<String, ContextMapping> contextMapping) {
|
||||
this.contextMapping = contextMapping;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletionFieldMapper build(Mapper.BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
CompletionFieldType completionFieldType = (CompletionFieldType)fieldType;
|
||||
completionFieldType.setProvider(new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads));
|
||||
completionFieldType.setContextMapping(contextMapping);
|
||||
return new CompletionFieldMapper(name, fieldType, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
}
|
||||
|
||||
}
|
||||
Fields.CONTENT_FIELD_NAME_WEIGHT, Fields.CONTENT_FIELD_NAME_CONTEXTS);
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
|
||||
|
@ -171,17 +118,12 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
if (fieldName.equals("type")) {
|
||||
continue;
|
||||
}
|
||||
if (Fields.ANALYZER.equals(fieldName) || // index_analyzer is for backcompat, remove for v3.0
|
||||
fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
|
||||
if (parserContext.parseFieldMatcher().match(fieldName, Fields.ANALYZER)) {
|
||||
indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
|
||||
iterator.remove();
|
||||
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.SEARCH_ANALYZER)) {
|
||||
searchAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals(Fields.PAYLOADS)) {
|
||||
builder.payloads(Boolean.parseBoolean(fieldNode.toString()));
|
||||
iterator.remove();
|
||||
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_SEPARATORS)) {
|
||||
builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString()));
|
||||
iterator.remove();
|
||||
|
@@ -191,14 +133,14 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.MAX_INPUT_LENGTH)) {
|
||||
builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
|
||||
} else if (parserContext.parseFieldMatcher().match(fieldName, Fields.CONTEXTS)) {
|
||||
builder.contextMappings(ContextMappings.load(fieldNode, parserContext.indexVersionCreated()));
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals(Fields.CONTEXT)) {
|
||||
builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated()));
|
||||
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (indexAnalyzer == null) {
|
||||
if (searchAnalyzer != null) {
|
||||
throw new MapperParsingException("analyzer on completion field [" + name + "] must be set when search_analyzer is set");
|
||||
|
@@ -207,9 +149,9 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
} else if (searchAnalyzer == null) {
|
||||
searchAnalyzer = indexAnalyzer;
|
||||
}
|
||||
|
||||
builder.indexAnalyzer(indexAnalyzer);
|
||||
builder.searchAnalyzer(searchAnalyzer);
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
@@ -223,40 +165,138 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
}
|
||||
|
||||
public static final class CompletionFieldType extends MappedFieldType {
|
||||
private PostingsFormat postingsFormat;
|
||||
private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
|
||||
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
|
||||
|
||||
private static PostingsFormat postingsFormat;
|
||||
|
||||
private boolean preserveSep = Defaults.DEFAULT_PRESERVE_SEPARATORS;
|
||||
private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS;
|
||||
private ContextMappings contextMappings = null;
|
||||
|
||||
public CompletionFieldType() {
|
||||
setFieldDataType(null);
|
||||
}
|
||||
|
||||
protected CompletionFieldType(CompletionFieldType ref) {
|
||||
private CompletionFieldType(CompletionFieldType ref) {
|
||||
super(ref);
|
||||
this.postingsFormat = ref.postingsFormat;
|
||||
this.analyzingSuggestLookupProvider = ref.analyzingSuggestLookupProvider;
|
||||
this.contextMapping = ref.contextMapping;
|
||||
this.contextMappings = ref.contextMappings;
|
||||
this.preserveSep = ref.preserveSep;
|
||||
this.preservePositionIncrements = ref.preservePositionIncrements;
|
||||
}
|
||||
|
||||
public void setPreserveSep(boolean preserveSep) {
|
||||
checkIfFrozen();
|
||||
this.preserveSep = preserveSep;
|
||||
}
|
||||
|
||||
public void setPreservePositionIncrements(boolean preservePositionIncrements) {
|
||||
checkIfFrozen();
|
||||
this.preservePositionIncrements = preservePositionIncrements;
|
||||
}
|
||||
|
||||
public void setContextMappings(ContextMappings contextMappings) {
|
||||
checkIfFrozen();
|
||||
this.contextMappings = contextMappings;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NamedAnalyzer indexAnalyzer() {
|
||||
final NamedAnalyzer indexAnalyzer = super.indexAnalyzer();
|
||||
if (indexAnalyzer != null && !(indexAnalyzer.analyzer() instanceof CompletionAnalyzer)) {
|
||||
return new NamedAnalyzer(indexAnalyzer.name(),
|
||||
new CompletionAnalyzer(indexAnalyzer, preserveSep, preservePositionIncrements));
|
||||
|
||||
}
|
||||
return indexAnalyzer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NamedAnalyzer searchAnalyzer() {
|
||||
final NamedAnalyzer searchAnalyzer = super.searchAnalyzer();
|
||||
if (searchAnalyzer != null && !(searchAnalyzer.analyzer() instanceof CompletionAnalyzer)) {
|
||||
return new NamedAnalyzer(searchAnalyzer.name(),
|
||||
new CompletionAnalyzer(searchAnalyzer, preserveSep, preservePositionIncrements));
|
||||
}
|
||||
return searchAnalyzer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if there are one or more context mappings defined
|
||||
* for this field type
|
||||
*/
|
||||
public boolean hasContextMappings() {
|
||||
return contextMappings != null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return associated context mappings for this field type
|
||||
*/
|
||||
public ContextMappings getContextMappings() {
|
||||
return contextMappings;
|
||||
}
|
||||
|
||||
public boolean preserveSep() {
|
||||
return preserveSep;
|
||||
}
|
||||
|
||||
public boolean preservePositionIncrements() {
|
||||
return preservePositionIncrements;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return postings format to use for this field-type
|
||||
*/
|
||||
public static synchronized PostingsFormat postingsFormat() {
|
||||
if (postingsFormat == null) {
|
||||
postingsFormat = new Completion50PostingsFormat();
|
||||
}
|
||||
return postingsFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Completion prefix query
|
||||
*/
|
||||
public CompletionQuery prefixQuery(Object value) {
|
||||
return new PrefixCompletionQuery(searchAnalyzer().analyzer(), createTerm(value));
|
||||
}
|
||||
|
||||
/**
|
||||
* Completion prefix regular expression query
|
||||
*/
|
||||
public CompletionQuery regexpQuery(Object value, int flags, int maxDeterminizedStates) {
|
||||
return new RegexCompletionQuery(createTerm(value), flags, maxDeterminizedStates);
|
||||
}
|
||||
|
||||
/**
|
||||
* Completion prefix fuzzy query
|
||||
*/
|
||||
public CompletionQuery fuzzyQuery(String value, Fuzziness fuzziness, int nonFuzzyPrefixLength,
|
||||
int minFuzzyPrefixLength, int maxExpansions, boolean transpositions,
|
||||
boolean unicodeAware) {
|
||||
return new FuzzyCompletionQuery(searchAnalyzer().analyzer(), createTerm(value), null,
|
||||
fuzziness.asDistance(), transpositions, nonFuzzyPrefixLength, minFuzzyPrefixLength,
|
||||
unicodeAware, maxExpansions);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof CompletionFieldType)) return false;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
if (!super.equals(o)) return false;
|
||||
CompletionFieldType fieldType = (CompletionFieldType) o;
|
||||
return analyzingSuggestLookupProvider.getPreserveSep() == fieldType.analyzingSuggestLookupProvider.getPreserveSep() &&
|
||||
analyzingSuggestLookupProvider.getPreservePositionsIncrements() == fieldType.analyzingSuggestLookupProvider.getPreservePositionsIncrements() &&
|
||||
analyzingSuggestLookupProvider.hasPayloads() == fieldType.analyzingSuggestLookupProvider.hasPayloads() &&
|
||||
Objects.equals(getContextMapping(), fieldType.getContextMapping());
|
||||
|
||||
CompletionFieldType that = (CompletionFieldType) o;
|
||||
|
||||
if (preserveSep != that.preserveSep) return false;
|
||||
if (preservePositionIncrements != that.preservePositionIncrements) return false;
|
||||
return !(contextMappings != null ? !contextMappings.equals(that.contextMappings) : that.contextMappings != null);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(super.hashCode(),
|
||||
analyzingSuggestLookupProvider.getPreserveSep(),
|
||||
analyzingSuggestLookupProvider.getPreservePositionsIncrements(),
|
||||
analyzingSuggestLookupProvider.hasPayloads(),
|
||||
getContextMapping());
|
||||
preserveSep,
|
||||
preservePositionIncrements,
|
||||
contextMappings);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -273,50 +313,20 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
|
||||
super.checkCompatibility(fieldType, conflicts, strict);
|
||||
CompletionFieldType other = (CompletionFieldType)fieldType;
|
||||
if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [payload] values");
|
||||
}
|
||||
if (analyzingSuggestLookupProvider.getPreservePositionsIncrements() != other.analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
|
||||
|
||||
if (preservePositionIncrements != other.preservePositionIncrements) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [preserve_position_increments] values");
|
||||
}
|
||||
if (analyzingSuggestLookupProvider.getPreserveSep() != other.analyzingSuggestLookupProvider.getPreserveSep()) {
|
||||
if (preserveSep != other.preserveSep) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [preserve_separators] values");
|
||||
}
|
||||
if(!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [context_mapping] values");
|
||||
if (hasContextMappings() != other.hasContextMappings()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
|
||||
} else if (hasContextMappings() && contextMappings.equals(other.contextMappings) == false) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [context_mappings] values");
|
||||
}
|
||||
}
|
||||
|
||||
public void setProvider(AnalyzingCompletionLookupProvider provider) {
|
||||
checkIfFrozen();
|
||||
this.analyzingSuggestLookupProvider = provider;
|
||||
}
|
||||
|
||||
public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
|
||||
if (in instanceof Completion090PostingsFormat) {
|
||||
throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
|
||||
}
|
||||
if (postingsFormat == null) {
|
||||
postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
|
||||
}
|
||||
return postingsFormat;
|
||||
}
|
||||
|
||||
public void setContextMapping(SortedMap<String, ContextMapping> contextMapping) {
|
||||
checkIfFrozen();
|
||||
this.contextMapping = contextMapping;
|
||||
}
|
||||
|
||||
/** Get the context mapping associated with this completion field */
|
||||
public SortedMap<String, ContextMapping> getContextMapping() {
|
||||
return contextMapping;
|
||||
}
|
||||
|
||||
/** @return true if a context mapping has been defined */
|
||||
public boolean requiresContext() {
|
||||
return contextMapping.isEmpty() == false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String value(Object value) {
|
||||
if (value == null) {
|
||||
|
@@ -329,13 +339,73 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
public boolean isSortable() {
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static final BytesRef EMPTY = new BytesRef();
/**
* Builder for {@link CompletionFieldMapper}
*/
public static class Builder extends FieldMapper.Builder<Builder, CompletionFieldMapper> {

private int maxInputLength = Defaults.DEFAULT_MAX_INPUT_LENGTH;
private ContextMappings contextMappings = null;
private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS;
private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS;

/**
* @param name of the completion field to build
*/
public Builder(String name) {
super(name, new CompletionFieldType());
builder = this;
}

/**
* @param maxInputLength maximum expected prefix length
* NOTE: prefixes longer than this will
* be truncated
*/
public Builder maxInputLength(int maxInputLength) {
if (maxInputLength <= 0) {
throw new IllegalArgumentException(Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]");
}
this.maxInputLength = maxInputLength;
return this;
}

/**
* Add context mapping to this field
* @param contextMappings see {@link ContextMappings#load(Object, Version)}
*/
public Builder contextMappings(ContextMappings contextMappings) {
this.contextMappings = contextMappings;
return this;
}

public Builder preserveSeparators(boolean preserveSeparators) {
this.preserveSeparators = preserveSeparators;
return this;
}

public Builder preservePositionIncrements(boolean preservePositionIncrements) {
this.preservePositionIncrements = preservePositionIncrements;
return this;
}

@Override
public CompletionFieldMapper build(BuilderContext context) {
setupFieldType(context);
CompletionFieldType completionFieldType = (CompletionFieldType) this.fieldType;
completionFieldType.setContextMappings(contextMappings);
completionFieldType.setPreservePositionIncrements(preservePositionIncrements);
completionFieldType.setPreserveSep(preserveSeparators);
return new CompletionFieldMapper(name, this.fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, maxInputLength);
}
}

private int maxInputLength;
|
||||
|
||||
public CompletionFieldMapper(String simpleName, MappedFieldType fieldType, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
public CompletionFieldMapper(String simpleName, MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, int maxInputLength) {
|
||||
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
|
||||
this.maxInputLength = maxInputLength;
|
||||
}
|
||||
|
@@ -345,216 +415,188 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
return (CompletionFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
/**
* Parses and indexes inputs
*
* Parsing:
* Acceptable format:
* "STRING" - interpreted as field value (input)
* "ARRAY" - each element can be one of {@link #parse(ParseContext, Token, XContentParser, Map)}
* "OBJECT" - see {@link #parse(ParseContext, Token, XContentParser, Map)}
*
* Indexing:
* if context mappings are defined, delegates to {@link ContextMappings#addField(ParseContext.Document, String, String, int, Map)}
* else adds inputs as a {@link org.apache.lucene.search.suggest.document.SuggestField}
*/
@Override
|
||||
public Mapper parse(ParseContext context) throws IOException {
|
||||
// parse
|
||||
XContentParser parser = context.parser();
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
if (token == XContentParser.Token.VALUE_NULL) {
|
||||
Token token = parser.currentToken();
|
||||
Map<String, CompletionInputMetaData> inputMap = new HashMap<>(1);
|
||||
if (token == Token.VALUE_NULL) {
|
||||
throw new MapperParsingException("completion field [" + fieldType().names().fullName() + "] does not support null values");
|
||||
}
|
||||
|
||||
String surfaceForm = null;
|
||||
BytesRef payload = null;
|
||||
long weight = -1;
|
||||
List<String> inputs = new ArrayList<>(4);
|
||||
|
||||
SortedMap<String, ContextConfig> contextConfig = null;
|
||||
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
inputs.add(parser.text());
|
||||
multiFields.parse(this, context);
|
||||
} else {
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
|
||||
throw new IllegalArgumentException("Unknown field name[" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
|
||||
}
|
||||
} else if (Fields.CONTEXT.equals(currentFieldName)) {
|
||||
SortedMap<String, ContextConfig> configs = new TreeMap<>();
|
||||
|
||||
if (token == Token.START_OBJECT) {
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
String name = parser.text();
|
||||
ContextMapping mapping = fieldType().getContextMapping().get(name);
|
||||
if (mapping == null) {
|
||||
throw new ElasticsearchParseException("context [{}] is not defined", name);
|
||||
} else {
|
||||
token = parser.nextToken();
|
||||
configs.put(name, mapping.parseContext(context, parser));
|
||||
}
|
||||
}
|
||||
contextConfig = new TreeMap<>();
|
||||
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
|
||||
ContextConfig config = configs.get(mapping.name());
|
||||
contextConfig.put(mapping.name(), config==null ? mapping.defaultConfig() : config);
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchParseException("context must be an object");
|
||||
}
|
||||
} else if (Fields.CONTENT_FIELD_NAME_PAYLOAD.equals(currentFieldName)) {
|
||||
if (!isStoringPayloads()) {
|
||||
throw new MapperException("Payloads disabled in mapping");
|
||||
}
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
XContentBuilder payloadBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
|
||||
payload = payloadBuilder.bytes().toBytesRef();
|
||||
payloadBuilder.close();
|
||||
} else if (token.isValue()) {
|
||||
payload = parser.utf8BytesOrNull();
|
||||
} else {
|
||||
throw new MapperException("payload doesn't support type " + token);
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
if (Fields.CONTENT_FIELD_NAME_OUTPUT.equals(currentFieldName)) {
|
||||
surfaceForm = parser.text();
|
||||
}
|
||||
if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
|
||||
inputs.add(parser.text());
|
||||
}
|
||||
if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
|
||||
Number weightValue;
|
||||
try {
|
||||
weightValue = Long.parseLong(parser.text());
|
||||
} catch (NumberFormatException e) {
|
||||
throw new IllegalArgumentException("Weight must be a string representing a numeric value, but was [" + parser.text() + "]");
|
||||
}
|
||||
weight = weightValue.longValue(); // always parse a long to make sure we don't get overflow
|
||||
checkWeight(weight);
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
|
||||
NumberType numberType = parser.numberType();
|
||||
if (NumberType.LONG != numberType && NumberType.INT != numberType) {
|
||||
throw new IllegalArgumentException("Weight must be an integer, but was [" + parser.numberValue() + "]");
|
||||
}
|
||||
weight = parser.longValue(); // always parse a long to make sure we don't get overflow
|
||||
checkWeight(weight);
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
inputs.add(parser.text());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(contextConfig == null) {
|
||||
contextConfig = new TreeMap<>();
|
||||
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
|
||||
contextConfig.put(mapping.name(), mapping.defaultConfig());
|
||||
}
|
||||
}
|
||||
|
||||
final ContextMapping.Context ctx = new ContextMapping.Context(contextConfig, context.doc());
|
||||
|
||||
payload = payload == null ? EMPTY : payload;
|
||||
if (surfaceForm == null) { // no surface form use the input
|
||||
for (String input : inputs) {
|
||||
if (input.length() == 0) {
|
||||
continue;
|
||||
}
|
||||
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
|
||||
input), weight, payload);
|
||||
context.doc().add(getCompletionField(ctx, input, suggestPayload));
|
||||
} else if (token == Token.START_ARRAY) {
|
||||
while ((token = parser.nextToken()) != Token.END_ARRAY) {
|
||||
parse(context, token, parser, inputMap);
|
||||
}
|
||||
} else {
|
||||
BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef(
|
||||
surfaceForm), weight, payload);
|
||||
for (String input : inputs) {
|
||||
if (input.length() == 0) {
|
||||
continue;
|
||||
parse(context, token, parser, inputMap);
|
||||
}
|
||||
|
||||
// index
|
||||
for (Map.Entry<String, CompletionInputMetaData> completionInput : inputMap.entrySet()) {
|
||||
String input = completionInput.getKey();
|
||||
// truncate input
|
||||
if (input.length() > maxInputLength) {
|
||||
int len = Math.min(maxInputLength, input.length());
|
||||
if (Character.isHighSurrogate(input.charAt(len - 1))) {
|
||||
assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len));
|
||||
len += 1;
|
||||
}
|
||||
context.doc().add(getCompletionField(ctx, input, suggestPayload));
|
||||
input = input.substring(0, len);
|
||||
}
|
||||
CompletionInputMetaData metaData = completionInput.getValue();
|
||||
if (fieldType().hasContextMappings()) {
|
||||
fieldType().getContextMappings().addField(context.doc(), fieldType().names().indexName(),
|
||||
input, metaData.weight, metaData.contexts);
|
||||
} else {
|
||||
context.doc().add(new SuggestField(fieldType().names().indexName(), input, metaData.weight));
|
||||
}
|
||||
}
|
||||
multiFields.parse(this, context);
|
||||
return null;
|
||||
}
|
||||
|
||||
private void checkWeight(long weight) {
|
||||
if (weight < 0 || weight > Integer.MAX_VALUE) {
|
||||
throw new IllegalArgumentException("Weight must be in the interval [0..2147483647], but was [" + weight + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) {
|
||||
final String originalInput = input;
|
||||
if (input.length() > maxInputLength) {
|
||||
final int len = correctSubStringLen(input, Math.min(maxInputLength, input.length()));
|
||||
input = input.substring(0, len);
|
||||
}
|
||||
for (int i = 0; i < input.length(); i++) {
|
||||
if (isReservedChar(input.charAt(i))) {
|
||||
throw new IllegalArgumentException("Illegal input [" + originalInput + "] UTF-16 codepoint [0x"
|
||||
+ Integer.toHexString((int) input.charAt(i)).toUpperCase(Locale.ROOT)
|
||||
+ "] at position " + i + " is a reserved character");
|
||||
/**
* Acceptable inputs:
* "STRING" - interpreted as the field value (input)
* "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
*/
private void parse(ParseContext parseContext, Token token, XContentParser parser, Map<String, CompletionInputMetaData> inputMap) throws IOException {
|
||||
String currentFieldName = null;
|
||||
if (token == Token.VALUE_STRING) {
|
||||
inputMap.put(parser.text(), new CompletionInputMetaData(Collections.<String, Set<CharSequence>>emptyMap(), 1));
|
||||
} else if (token == Token.START_OBJECT) {
|
||||
Set<String> inputs = new HashSet<>();
|
||||
int weight = 1;
|
||||
Map<String, Set<CharSequence>> contextsMap = new HashMap<>();
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
|
||||
throw new IllegalArgumentException("unknown field name [" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
|
||||
}
|
||||
} else if (currentFieldName != null) {
|
||||
if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
|
||||
if (token == Token.VALUE_STRING) {
|
||||
inputs.add(parser.text());
|
||||
} else if (token == Token.START_ARRAY) {
|
||||
while ((token = parser.nextToken()) != Token.END_ARRAY) {
|
||||
if (token == Token.VALUE_STRING) {
|
||||
inputs.add(parser.text());
|
||||
} else {
|
||||
throw new IllegalArgumentException("input array must have string values, but was [" + token.name() + "]");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("input must be a string or array, but was [" + token.name() + "]");
|
||||
}
|
||||
} else if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
|
||||
final Number weightValue;
|
||||
if (token == Token.VALUE_STRING) {
|
||||
try {
|
||||
weightValue = Long.parseLong(parser.text());
|
||||
} catch (NumberFormatException e) {
|
||||
throw new IllegalArgumentException("weight must be an integer, but was [" + parser.text() + "]");
|
||||
}
|
||||
} else if (token == Token.VALUE_NUMBER) {
|
||||
NumberType numberType = parser.numberType();
|
||||
if (NumberType.LONG != numberType && NumberType.INT != numberType) {
|
||||
throw new IllegalArgumentException("weight must be an integer, but was [" + parser.numberValue() + "]");
|
||||
}
|
||||
weightValue = parser.numberValue();
|
||||
} else {
|
||||
throw new IllegalArgumentException("weight must be a number or string, but was [" + token.name() + "]");
|
||||
}
|
||||
if (weightValue.longValue() < 0 || weightValue.longValue() > Integer.MAX_VALUE) { // always parse a long to make sure we don't get overflow
|
||||
throw new IllegalArgumentException("weight must be in the interval [0..2147483647], but was [" + weightValue.longValue() + "]");
|
||||
}
|
||||
weight = weightValue.intValue();
|
||||
} else if (Fields.CONTENT_FIELD_NAME_CONTEXTS.equals(currentFieldName)) {
|
||||
if (fieldType().hasContextMappings() == false) {
|
||||
throw new IllegalArgumentException("contexts field is not supported for field: [" + fieldType().names().fullName() + "]");
|
||||
}
|
||||
ContextMappings contextMappings = fieldType().getContextMappings();
|
||||
XContentParser.Token currentToken = parser.currentToken();
|
||||
if (currentToken == XContentParser.Token.START_OBJECT) {
|
||||
ContextMapping contextMapping = null;
|
||||
String fieldName = null;
|
||||
while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (currentToken == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
contextMapping = contextMappings.get(fieldName);
|
||||
} else if (currentToken == XContentParser.Token.VALUE_STRING
|
||||
|| currentToken == XContentParser.Token.START_ARRAY
|
||||
|| currentToken == XContentParser.Token.START_OBJECT) {
|
||||
assert fieldName != null;
|
||||
assert !contextsMap.containsKey(fieldName);
|
||||
contextsMap.put(fieldName, contextMapping.parseContext(parseContext, parser));
|
||||
} else {
|
||||
throw new IllegalArgumentException("contexts must be an object or an array , but was [" + currentToken + "]");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("contexts must be an object or an array , but was [" + currentToken + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return new SuggestField(fieldType().names().indexName(), ctx, input, fieldType(), payload, fieldType().analyzingSuggestLookupProvider);
|
||||
}
|
||||
|
||||
public static int correctSubStringLen(String input, int len) {
if (Character.isHighSurrogate(input.charAt(len - 1))) {
assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len));
return len + 1;
}
return len;
}
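// A minimal, self-contained sketch (not part of this mapper) illustrating why the
// truncation above extends the cut by one character: cutting a string between the
// two halves of a surrogate pair would produce a malformed suggestion input. Uses
// only the JDK; the class and method names here are illustrative.
class SurrogateSafeTruncationExample {
    static String truncate(String input, int maxLength) {
        if (input.length() <= maxLength) {
            return input;
        }
        int len = maxLength;
        // same rule as correctSubStringLen(): never split a surrogate pair
        if (Character.isHighSurrogate(input.charAt(len - 1))) {
            len += 1;
        }
        return input.substring(0, len);
    }

    public static void main(String[] args) {
        String musical = "ab\uD834\uDD1E"; // "ab" + U+1D11E (MUSICAL SYMBOL G CLEF)
        // a naive cut at 3 would end on the high surrogate; the rule keeps the full pair
        System.out.println(truncate(musical, 3)); // prints "ab" followed by the full clef character
    }
}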
|
||||
|
||||
public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException {
|
||||
return fieldType().analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload);
|
||||
}
|
||||
|
||||
private static final class SuggestField extends Field {
|
||||
private final BytesRef payload;
|
||||
private final CompletionTokenStream.ToFiniteStrings toFiniteStrings;
|
||||
private final ContextMapping.Context ctx;
|
||||
|
||||
public SuggestField(String name, ContextMapping.Context ctx, String value, MappedFieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) {
|
||||
super(name, value, type);
|
||||
this.payload = payload;
|
||||
this.toFiniteStrings = toFiniteStrings;
|
||||
this.ctx = ctx;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
|
||||
TokenStream ts = ctx.wrapTokenStream(super.tokenStream(analyzer, previous));
|
||||
return new CompletionTokenStream(ts, payload, toFiniteStrings);
|
||||
for (String input : inputs) {
|
||||
if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) {
|
||||
inputMap.put(input, new CompletionInputMetaData(contextsMap, weight));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new ElasticsearchParseException("failed to parse expected text or object got" + token.name());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static class CompletionInputMetaData {
public final Map<String, Set<CharSequence>> contexts;
public final int weight;

CompletionInputMetaData(Map<String, Set<CharSequence>> contexts, int weight) {
this.contexts = contexts;
this.weight = weight;
}
}

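// Hedged, standalone sketch (names below are illustrative, not part of the mapper):
// what the object form of a completion value accepted by the private parse(...)
// method looks like, and what it extracts per input into the inputMap.
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class CompletionInputExample {
    public static void main(String[] args) {
        // As it would appear in an indexed document (object form):
        // { "input": ["Nevermind", "Nirvana"], "weight": 34, "contexts": { "genre": ["rock", "grunge"] } }
        Set<String> inputs = new HashSet<>(Arrays.asList("Nevermind", "Nirvana"));
        int weight = 34; // must fit in [0..Integer.MAX_VALUE], see the weight checks above
        Map<String, Set<CharSequence>> contexts = new HashMap<>();
        contexts.put("genre", new HashSet<CharSequence>(Arrays.asList("rock", "grunge")));

        // The parser keeps, per input, the highest weight seen plus the contexts map,
        // which is what CompletionInputMetaData carries to the indexing step.
        for (String input : inputs) {
            System.out.println(input + " -> weight=" + weight + ", contexts=" + contexts);
        }
    }
}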
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(simpleName())
|
||||
.field(Fields.TYPE, CONTENT_TYPE);
|
||||
|
||||
builder.field(Fields.ANALYZER, fieldType().indexAnalyzer().name());
|
||||
.field(Fields.TYPE.getPreferredName(), CONTENT_TYPE);
|
||||
builder.field(Fields.ANALYZER.getPreferredName(), fieldType().indexAnalyzer().name());
|
||||
if (fieldType().indexAnalyzer().name().equals(fieldType().searchAnalyzer().name()) == false) {
|
||||
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType().searchAnalyzer().name());
|
||||
}
|
||||
builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads());
|
||||
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreserveSep());
|
||||
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements());
|
||||
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), fieldType().preserveSep());
|
||||
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), fieldType().preservePositionIncrements());
|
||||
builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength);
|
||||
multiFields.toXContent(builder, params);
|
||||
|
||||
if(fieldType().requiresContext()) {
|
||||
builder.startObject(Fields.CONTEXT);
|
||||
for (ContextMapping mapping : fieldType().getContextMapping().values()) {
|
||||
builder.value(mapping);
|
||||
}
|
||||
builder.endObject();
|
||||
if (fieldType().hasContextMappings()) {
|
||||
builder.startArray(Fields.CONTEXTS.getPreferredName());
|
||||
fieldType().getContextMappings().toXContent(builder, params);
|
||||
builder.endArray();
|
||||
}
|
||||
|
||||
multiFields.toXContent(builder, params);
|
||||
return builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
// no-op
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -562,10 +604,6 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
public boolean isStoringPayloads() {
|
||||
return fieldType().analyzingSuggestLookupProvider.hasPayloads();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
|
@@ -574,22 +612,4 @@ public class CompletionFieldMapper extends FieldMapper {
|
|||
this.maxInputLength = fieldMergeWith.maxInputLength;
|
||||
}
|
||||
}
|
||||
|
||||
// this should be package private but our tests don't allow it.
public static boolean isReservedChar(char character) {
/* we use 0x001F as a SEP_LABEL in the suggester but we can use the UTF-16 representation since they
* are equivalent. We also don't need to convert the input character to UTF-8 here to check for
* the 0x00 end label since all multi-byte UTF-8 chars start with 0x10 binary so if the UTF-16 CP is == 0x00
* it's the single byte UTF-8 CP */
assert XAnalyzingSuggester.PAYLOAD_SEP == XAnalyzingSuggester.SEP_LABEL; // ensure they are the same!
switch(character) {
case XAnalyzingSuggester.END_BYTE:
case XAnalyzingSuggester.SEP_LABEL:
case XAnalyzingSuggester.HOLE_CHARACTER:
case ContextMapping.SEPARATOR:
return true;
default:
return false;
}
}
}
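// Hedged usage sketch: callers that assemble their own suggestion inputs can run the
// public isReservedChar(...) check above up front to reject the control characters the
// suggester reserves internally, instead of failing later during indexing. The import
// path is assumed from this mapper's usual package; adjust if it differs.
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;

class ReservedCharCheckExample {
    static void validateSuggestionInput(String input) {
        for (int i = 0; i < input.length(); i++) {
            if (CompletionFieldMapper.isReservedChar(input.charAt(i))) {
                throw new IllegalArgumentException(
                        "suggestion input contains a reserved character at position " + i);
            }
        }
    }

    public static void main(String[] args) {
        validateSuggestionInput("Foo Fighters");   // fine
        validateSuggestionInput("bad\u001Finput"); // throws: 0x1F is the suggester's separator label
    }
}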
|
||||
|
|
|
@@ -0,0 +1,539 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
|
||||
|
||||
/**
|
||||
* GeoPointFieldMapper base class to maintain backward compatibility
|
||||
*/
|
||||
public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
|
||||
public static final String CONTENT_TYPE = "geo_point";
|
||||
|
||||
public static class Names {
|
||||
public static final String LAT = "lat";
|
||||
public static final String LAT_SUFFIX = "." + LAT;
|
||||
public static final String LON = "lon";
|
||||
public static final String LON_SUFFIX = "." + LON;
|
||||
public static final String GEOHASH = "geohash";
|
||||
public static final String GEOHASH_SUFFIX = "." + GEOHASH;
|
||||
public static final String IGNORE_MALFORMED = "ignore_malformed";
|
||||
}
|
||||
|
||||
public static class Defaults {
|
||||
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
|
||||
public static final boolean ENABLE_LATLON = false;
|
||||
public static final boolean ENABLE_GEOHASH = false;
|
||||
public static final boolean ENABLE_GEOHASH_PREFIX = false;
|
||||
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
|
||||
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
|
||||
}
|
||||
|
||||
public abstract static class Builder<T extends Builder, Y extends BaseGeoPointFieldMapper> extends FieldMapper.Builder<T, Y> {
|
||||
protected ContentPath.Type pathType = Defaults.PATH_TYPE;
|
||||
|
||||
protected boolean enableLatLon = Defaults.ENABLE_LATLON;
|
||||
|
||||
protected Integer precisionStep;
|
||||
|
||||
protected boolean enableGeoHash = Defaults.ENABLE_GEOHASH;
|
||||
|
||||
protected boolean enableGeoHashPrefix = Defaults.ENABLE_GEOHASH_PREFIX;
|
||||
|
||||
protected int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
|
||||
|
||||
protected Boolean ignoreMalformed;
|
||||
|
||||
public Builder(String name, GeoPointFieldType fieldType) {
|
||||
super(name, fieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldType fieldType() {
|
||||
return (GeoPointFieldType)fieldType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T multiFieldPathType(ContentPath.Type pathType) {
|
||||
this.pathType = pathType;
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public T fieldDataSettings(Settings settings) {
|
||||
this.fieldDataSettings = settings;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T enableLatLon(boolean enableLatLon) {
|
||||
this.enableLatLon = enableLatLon;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T precisionStep(int precisionStep) {
|
||||
this.precisionStep = precisionStep;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T enableGeoHash(boolean enableGeoHash) {
|
||||
this.enableGeoHash = enableGeoHash;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T geoHashPrefix(boolean enableGeoHashPrefix) {
|
||||
this.enableGeoHashPrefix = enableGeoHashPrefix;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T geoHashPrecision(int precision) {
|
||||
this.geoHashPrecision = precision;
|
||||
return builder;
|
||||
}
|
||||
|
||||
public T ignoreMalformed(boolean ignoreMalformed) {
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
|
||||
if (ignoreMalformed != null) {
|
||||
return new Explicit<>(ignoreMalformed, true);
|
||||
}
|
||||
if (context.indexSettings() != null) {
|
||||
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
|
||||
}
|
||||
return Defaults.IGNORE_MALFORMED;
|
||||
}
|
||||
|
||||
public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
|
||||
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo);
|
||||
|
||||
public Y build(Mapper.BuilderContext context) {
|
||||
ContentPath.Type origPathType = context.path().pathType();
|
||||
context.path().pathType(pathType);
|
||||
|
||||
GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType;
|
||||
|
||||
DoubleFieldMapper latMapper = null;
|
||||
DoubleFieldMapper lonMapper = null;
|
||||
|
||||
context.path().add(name);
|
||||
if (enableLatLon) {
|
||||
NumberFieldMapper.Builder<?, ?> latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
|
||||
NumberFieldMapper.Builder<?, ?> lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
|
||||
if (precisionStep != null) {
|
||||
latMapperBuilder.precisionStep(precisionStep);
|
||||
lonMapperBuilder.precisionStep(precisionStep);
|
||||
}
|
||||
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
|
||||
lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
|
||||
geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType());
|
||||
}
|
||||
StringFieldMapper geoHashMapper = null;
|
||||
if (enableGeoHash || enableGeoHashPrefix) {
|
||||
// TODO: possible also implicitly enable geohash if geohash precision is set
|
||||
geoHashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).store(fieldType.stored())
|
||||
.omitNorms(true).indexOptions(IndexOptions.DOCS).build(context);
|
||||
geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix);
|
||||
}
|
||||
context.path().remove();
|
||||
context.path().pathType(origPathType);
|
||||
|
||||
return build(context, name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
|
||||
latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract static class TypeParser implements Mapper.TypeParser {
|
||||
@Override
|
||||
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder;
|
||||
// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
|
||||
if (parserContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
|
||||
builder = new GeoPointFieldMapperLegacy.Builder(name);
|
||||
} else {
|
||||
builder = new GeoPointFieldMapper.Builder(name);
|
||||
}
|
||||
parseField(builder, name, node, parserContext);
|
||||
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object propNode = entry.getValue();
|
||||
if (propName.equals("lat_lon")) {
|
||||
builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("precision_step")) {
|
||||
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash")) {
|
||||
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash_prefix")) {
|
||||
builder.geoHashPrefix(XContentMapValues.nodeBooleanValue(propNode));
|
||||
if (XContentMapValues.nodeBooleanValue(propNode)) {
|
||||
builder.enableGeoHash(true);
|
||||
}
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash_precision")) {
|
||||
if (propNode instanceof Integer) {
|
||||
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
|
||||
} else {
|
||||
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
|
||||
}
|
||||
iterator.remove();
|
||||
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
|
||||
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
||||
if (builder instanceof GeoPointFieldMapperLegacy.Builder) {
return GeoPointFieldMapperLegacy.parse((GeoPointFieldMapperLegacy.Builder) builder, node, parserContext);
}

return (GeoPointFieldMapper.Builder) builder;
}
}
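// Hedged sketch: a mapping fragment that exercises the options handled by the
// TypeParser above. The option names come straight from the parser; the field name
// "location" and the "1km" precision are illustrative values only.
class GeoPointMappingExample {
    static final String MAPPING =
            "{\n" +
            "  \"properties\": {\n" +
            "    \"location\": {\n" +
            "      \"type\": \"geo_point\",\n" +
            "      \"lat_lon\": true,\n" +              // adds .lat/.lon double sub-fields
            "      \"geohash_prefix\": true,\n" +       // also turns on geohash, see the parser above
            "      \"geohash_precision\": \"1km\",\n" + // string precisions go through GeoUtils.geoHashLevelsForPrecision
            "      \"ignore_malformed\": false\n" +
            "    }\n" +
            "  }\n" +
            "}";

    public static void main(String[] args) {
        System.out.println(MAPPING);
    }
}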
|
||||
|
||||
public static class GeoPointFieldType extends MappedFieldType {
|
||||
protected MappedFieldType geoHashFieldType;
|
||||
protected int geoHashPrecision;
|
||||
protected boolean geoHashPrefixEnabled;
|
||||
|
||||
protected MappedFieldType latFieldType;
|
||||
protected MappedFieldType lonFieldType;
|
||||
|
||||
GeoPointFieldType() {}
|
||||
|
||||
GeoPointFieldType(GeoPointFieldType ref) {
|
||||
super(ref);
|
||||
this.geoHashFieldType = ref.geoHashFieldType; // copying ref is ok, this can never be modified
|
||||
this.geoHashPrecision = ref.geoHashPrecision;
|
||||
this.geoHashPrefixEnabled = ref.geoHashPrefixEnabled;
|
||||
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
|
||||
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappedFieldType clone() {
|
||||
return new GeoPointFieldType(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!super.equals(o)) return false;
|
||||
GeoPointFieldType that = (GeoPointFieldType) o;
|
||||
return geoHashPrecision == that.geoHashPrecision &&
|
||||
geoHashPrefixEnabled == that.geoHashPrefixEnabled &&
|
||||
java.util.Objects.equals(geoHashFieldType, that.geoHashFieldType) &&
|
||||
java.util.Objects.equals(latFieldType, that.latFieldType) &&
|
||||
java.util.Objects.equals(lonFieldType, that.lonFieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return java.util.Objects.hash(super.hashCode(), geoHashFieldType, geoHashPrecision, geoHashPrefixEnabled, latFieldType,
|
||||
lonFieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
|
||||
super.checkCompatibility(fieldType, conflicts, strict);
|
||||
GeoPointFieldType other = (GeoPointFieldType)fieldType;
|
||||
if (isLatLonEnabled() != other.isLatLonEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
|
||||
}
|
||||
if (isLatLonEnabled() && other.isLatLonEnabled() &&
|
||||
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
|
||||
}
|
||||
if (isGeoHashEnabled() != other.isGeoHashEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
|
||||
}
|
||||
if (geoHashPrecision() != other.geoHashPrecision()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
|
||||
}
|
||||
if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isGeoHashEnabled() {
|
||||
return geoHashFieldType != null;
|
||||
}
|
||||
|
||||
public MappedFieldType geoHashFieldType() {
|
||||
return geoHashFieldType;
|
||||
}
|
||||
|
||||
public int geoHashPrecision() {
|
||||
return geoHashPrecision;
|
||||
}
|
||||
|
||||
public boolean isGeoHashPrefixEnabled() {
|
||||
return geoHashPrefixEnabled;
|
||||
}
|
||||
|
||||
public void setGeoHashEnabled(MappedFieldType geoHashFieldType, int geoHashPrecision, boolean geoHashPrefixEnabled) {
|
||||
checkIfFrozen();
|
||||
this.geoHashFieldType = geoHashFieldType;
|
||||
this.geoHashPrecision = geoHashPrecision;
|
||||
this.geoHashPrefixEnabled = geoHashPrefixEnabled;
|
||||
}
|
||||
|
||||
public boolean isLatLonEnabled() {
|
||||
return latFieldType != null;
|
||||
}
|
||||
|
||||
public MappedFieldType latFieldType() {
|
||||
return latFieldType;
|
||||
}
|
||||
|
||||
public MappedFieldType lonFieldType() {
|
||||
return lonFieldType;
|
||||
}
|
||||
|
||||
public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) {
|
||||
checkIfFrozen();
|
||||
this.latFieldType = latFieldType;
|
||||
this.lonFieldType = lonFieldType;
|
||||
}
|
||||
}
|
||||
|
||||
protected final DoubleFieldMapper latMapper;
|
||||
|
||||
protected final DoubleFieldMapper lonMapper;
|
||||
|
||||
protected final ContentPath.Type pathType;
|
||||
|
||||
protected final StringFieldMapper geoHashMapper;
|
||||
|
||||
protected Explicit<Boolean> ignoreMalformed;
|
||||
|
||||
protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
|
||||
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper,
|
||||
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
|
||||
this.pathType = pathType;
|
||||
this.latMapper = latMapper;
|
||||
this.lonMapper = lonMapper;
|
||||
this.geoHashMapper = geoHashMapper;
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldType fieldType() {
|
||||
return (GeoPointFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
|
||||
BaseGeoPointFieldMapper gpfmMergeWith = (BaseGeoPointFieldMapper) mergeWith;
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gpfmMergeWith.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
List<Mapper> extras = new ArrayList<>();
|
||||
if (fieldType().isGeoHashEnabled()) {
|
||||
extras.add(geoHashMapper);
|
||||
}
|
||||
if (fieldType().isLatLonEnabled()) {
|
||||
extras.add(latMapper);
|
||||
extras.add(lonMapper);
|
||||
}
|
||||
return Iterators.concat(super.iterator(), extras.iterator());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
|
||||
}
|
||||
|
||||
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
|
||||
if (fieldType().isGeoHashEnabled()) {
|
||||
if (geoHash == null) {
|
||||
geoHash = GeoHashUtils.stringEncode(point.lon(), point.lat());
|
||||
}
|
||||
addGeoHashField(context, geoHash);
|
||||
}
|
||||
if (fieldType().isLatLonEnabled()) {
|
||||
latMapper.parse(context.createExternalValueContext(point.lat()));
|
||||
lonMapper.parse(context.createExternalValueContext(point.lon()));
|
||||
}
|
||||
multiFields.parse(this, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mapper parse(ParseContext context) throws IOException {
|
||||
ContentPath.Type origPathType = context.path().pathType();
|
||||
context.path().pathType(pathType);
|
||||
context.path().add(simpleName());
|
||||
|
||||
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);
|
||||
|
||||
if (sparse != null) {
|
||||
parse(context, sparse, null);
|
||||
} else {
|
||||
sparse = new GeoPoint();
|
||||
XContentParser.Token token = context.parser().currentToken();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
token = context.parser().nextToken();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
// it's an array of arrays of lon/lat [ [1.2, 1.3], [1.4, 1.5] ]
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
|
||||
token = context.parser().nextToken();
|
||||
}
|
||||
} else {
|
||||
// it's an array of other possible values
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
double lon = context.parser().doubleValue();
|
||||
token = context.parser().nextToken();
|
||||
double lat = context.parser().doubleValue();
|
||||
while ((token = context.parser().nextToken()) != XContentParser.Token.END_ARRAY);
|
||||
parse(context, sparse.reset(lat, lon), null);
|
||||
} else {
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
parsePointFromString(context, sparse, context.parser().text());
|
||||
} else {
|
||||
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
|
||||
}
|
||||
token = context.parser().nextToken();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
parsePointFromString(context, sparse, context.parser().text());
} else if (token != XContentParser.Token.VALUE_NULL) {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
}
}

context.path().remove();
context.path().pathType(origPathType);
return null;
}
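// Hedged sketch: the document shapes the parse(...) method above accepts for a
// geo_point field (the field name "location" is illustrative). Note the convention
// difference the code enforces: arrays are [lon, lat], strings are "lat,lon", and a
// string without a comma is treated as a geohash.
class GeoPointValueFormatsExample {
    static final String[] ACCEPTED_FORMS = {
            "{ \"location\": { \"lat\": 41.12, \"lon\": -71.34 } }",     // object
            "{ \"location\": \"41.12,-71.34\" }",                        // "lat,lon" string
            "{ \"location\": \"drm3btev3e86\" }",                        // geohash string (no comma)
            "{ \"location\": [ -71.34, 41.12 ] }",                       // [lon, lat] array
            "{ \"location\": [ [ -71.34, 41.12 ], [ -71.30, 41.15 ] ] }" // array of [lon, lat] arrays
    };

    public static void main(String[] args) {
        for (String form : ACCEPTED_FORMS) {
            System.out.println(form);
        }
    }
}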
|
||||
|
||||
private void addGeoHashField(ParseContext context, String geoHash) throws IOException {
int len = Math.min(fieldType().geoHashPrecision(), geoHash.length());
int min = fieldType().isGeoHashPrefixEnabled() ? 1 : len;

for (int i = len; i >= min; i--) {
// side effect of this call is adding the field
geoHashMapper.parse(context.createExternalValueContext(geoHash.substring(0, i)));
}
}
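// Hedged sketch of what the loop above indexes: with geohash_prefix enabled every
// leading substring of the geohash (down to length 1) is added as a term, which is
// what makes prefix-style geohash cell lookups possible. Values are illustrative.
class GeoHashPrefixExample {
    public static void main(String[] args) {
        String geoHash = "drm3btev3e86";
        int precision = 5;            // stands in for fieldType().geoHashPrecision()
        boolean prefixEnabled = true; // stands in for fieldType().isGeoHashPrefixEnabled()

        int len = Math.min(precision, geoHash.length());
        int min = prefixEnabled ? 1 : len;
        for (int i = len; i >= min; i--) {
            System.out.println(geoHash.substring(0, i)); // drm3b, drm3, drm, dr, d
        }
    }
}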
|
||||
|
||||
private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException {
|
||||
if (point.indexOf(',') < 0) {
|
||||
parse(context, sparse.resetFromGeoHash(point), point);
|
||||
} else {
|
||||
parse(context, sparse.resetFromString(point), null);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
if (includeDefaults || pathType != Defaults.PATH_TYPE) {
|
||||
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
|
||||
builder.field("lat_lon", fieldType().isLatLonEnabled());
|
||||
}
|
||||
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
|
||||
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
|
||||
}
|
||||
if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) {
|
||||
builder.field("geohash", fieldType().isGeoHashEnabled());
|
||||
}
|
||||
if (includeDefaults || fieldType().isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
|
||||
builder.field("geohash_prefix", fieldType().isGeoHashPrefixEnabled());
|
||||
}
|
||||
if (fieldType().isGeoHashEnabled() && (includeDefaults || fieldType().geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) {
|
||||
builder.field("geohash_precision", fieldType().geoHashPrecision());
|
||||
}
|
||||
if (includeDefaults || ignoreMalformed.explicit()) {
|
||||
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -19,38 +19,24 @@
|
|||
|
||||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.document.GeoPointField;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.util.ByteUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.mapper.*;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.*;
|
||||
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Parsing: We handle:
|
||||
|
@@ -62,736 +48,84 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.*;
* "lon" : 2.1
|
||||
* }
|
||||
*/
|
||||
public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
|
||||
public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
|
||||
|
||||
public static final String CONTENT_TYPE = "geo_point";
|
||||
|
||||
public static class Names {
|
||||
public static final String LAT = "lat";
|
||||
public static final String LAT_SUFFIX = "." + LAT;
|
||||
public static final String LON = "lon";
|
||||
public static final String LON_SUFFIX = "." + LON;
|
||||
public static final String GEOHASH = "geohash";
|
||||
public static final String GEOHASH_SUFFIX = "." + GEOHASH;
|
||||
public static final String IGNORE_MALFORMED = "ignore_malformed";
|
||||
public static final String COERCE = "coerce";
|
||||
}
|
||||
public static class Defaults extends BaseGeoPointFieldMapper.Defaults {
|
||||
|
||||
public static class Defaults {
|
||||
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
|
||||
public static final boolean ENABLE_LATLON = false;
|
||||
public static final boolean ENABLE_GEOHASH = false;
|
||||
public static final boolean ENABLE_GEOHASH_PREFIX = false;
|
||||
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
|
||||
|
||||
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
|
||||
public static final Explicit<Boolean> COERCE = new Explicit(false, false);
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();
|
||||
public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType();
|
||||
|
||||
static {
|
||||
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
|
||||
FIELD_TYPE.setTokenized(false);
|
||||
FIELD_TYPE.setOmitNorms(true);
|
||||
FIELD_TYPE.setNumericType(FieldType.NumericType.LONG);
|
||||
FIELD_TYPE.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
|
||||
FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
FIELD_TYPE.setHasDocValues(true);
|
||||
FIELD_TYPE.setStored(true);
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder, GeoPointFieldMapper> {
|
||||
|
||||
private ContentPath.Type pathType = Defaults.PATH_TYPE;
|
||||
|
||||
private boolean enableGeoHash = Defaults.ENABLE_GEOHASH;
|
||||
|
||||
private boolean enableGeohashPrefix = Defaults.ENABLE_GEOHASH_PREFIX;
|
||||
|
||||
private boolean enableLatLon = Defaults.ENABLE_LATLON;
|
||||
|
||||
private Integer precisionStep;
|
||||
|
||||
private int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
|
||||
|
||||
private Boolean ignoreMalformed;
|
||||
|
||||
private Boolean coerce;
|
||||
/**
|
||||
* Concrete builder for indexed GeoPointField type
|
||||
*/
|
||||
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, GeoPointFieldMapper> {
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE);
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
public Builder ignoreMalformed(boolean ignoreMalformed) {
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
|
||||
if (ignoreMalformed != null) {
|
||||
return new Explicit<>(ignoreMalformed, true);
|
||||
}
|
||||
if (context.indexSettings() != null) {
|
||||
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
|
||||
}
|
||||
return Defaults.IGNORE_MALFORMED;
|
||||
}
|
||||
|
||||
public Builder coerce(boolean coerce) {
|
||||
this.coerce = coerce;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Explicit<Boolean> coerce(BuilderContext context) {
|
||||
if (coerce != null) {
|
||||
return new Explicit<>(coerce, true);
|
||||
}
|
||||
if (context.indexSettings() != null) {
|
||||
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
|
||||
}
|
||||
return Defaults.COERCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldType fieldType() {
|
||||
return (GeoPointFieldType)fieldType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder multiFieldPathType(ContentPath.Type pathType) {
|
||||
this.pathType = pathType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder enableGeoHash(boolean enableGeoHash) {
|
||||
this.enableGeoHash = enableGeoHash;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder geohashPrefix(boolean enableGeohashPrefix) {
|
||||
this.enableGeohashPrefix = enableGeohashPrefix;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder enableLatLon(boolean enableLatLon) {
|
||||
this.enableLatLon = enableLatLon;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder precisionStep(int precisionStep) {
|
||||
this.precisionStep = precisionStep;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder geoHashPrecision(int precision) {
|
||||
this.geoHashPrecision = precision;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder fieldDataSettings(Settings settings) {
|
||||
this.fieldDataSettings = settings;
|
||||
return builder;
|
||||
public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType,
|
||||
MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
|
||||
DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
|
||||
CopyTo copyTo) {
|
||||
fieldType.setTokenized(false);
|
||||
setupFieldType(context);
|
||||
return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
|
||||
geoHashMapper, multiFields, ignoreMalformed, copyTo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldMapper build(BuilderContext context) {
|
||||
ContentPath.Type origPathType = context.path().pathType();
|
||||
context.path().pathType(pathType);
|
||||
|
||||
DoubleFieldMapper latMapper = null;
|
||||
DoubleFieldMapper lonMapper = null;
|
||||
GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType;
|
||||
|
||||
context.path().add(name);
|
||||
if (enableLatLon) {
|
||||
NumberFieldMapper.Builder<?, ?> latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
|
||||
NumberFieldMapper.Builder<?, ?> lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
|
||||
if (precisionStep != null) {
|
||||
latMapperBuilder.precisionStep(precisionStep);
|
||||
lonMapperBuilder.precisionStep(precisionStep);
|
||||
}
|
||||
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
|
||||
lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
|
||||
geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType());
|
||||
}
|
||||
StringFieldMapper geohashMapper = null;
|
||||
if (enableGeoHash || enableGeohashPrefix) {
|
||||
// TODO: possible also implicitly enable geohash if geohash precision is set
|
||||
geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).store(fieldType.stored())
|
||||
.omitNorms(true).indexOptions(IndexOptions.DOCS).build(context);
|
||||
geoPointFieldType.setGeohashEnabled(geohashMapper.fieldType(), geoHashPrecision, enableGeohashPrefix);
|
||||
}
|
||||
context.path().remove();
|
||||
|
||||
context.path().pathType(origPathType);
|
||||
|
||||
// this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to
|
||||
// store them as a single token.
|
||||
fieldType.setTokenized(false);
|
||||
setupFieldType(context);
|
||||
fieldType.setHasDocValues(false);
|
||||
defaultFieldType.setHasDocValues(false);
|
||||
return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
|
||||
latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), coerce(context));
|
||||
return super.build(context);
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser {
|
||||
@Override
|
||||
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
Builder builder = geoPointField(name);
|
||||
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
|
||||
parseField(builder, name, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object propNode = entry.getValue();
|
||||
if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
|
||||
builder.multiFieldPathType(parsePathType(name, propNode.toString()));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("lat_lon")) {
|
||||
builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash")) {
|
||||
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash_prefix")) {
|
||||
builder.geohashPrefix(XContentMapValues.nodeBooleanValue(propNode));
|
||||
if (XContentMapValues.nodeBooleanValue(propNode)) {
|
||||
builder.enableGeoHash(true);
|
||||
}
|
||||
iterator.remove();
|
||||
} else if (propName.equals("precision_step")) {
|
||||
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals("geohash_precision")) {
|
||||
if (propNode instanceof Integer) {
|
||||
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
|
||||
} else {
|
||||
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
|
||||
}
|
||||
iterator.remove();
|
||||
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
|
||||
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
|
||||
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
|
||||
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
|
||||
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (propName.equals(Names.COERCE)) {
|
||||
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
|
||||
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
|
||||
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
|
||||
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
return super.parse(name, node, parserContext);
|
||||
}
|
||||
}
|
||||
|
||||
public static final class GeoPointFieldType extends MappedFieldType {
|
||||
|
||||
private MappedFieldType geohashFieldType;
|
||||
private int geohashPrecision;
|
||||
private boolean geohashPrefixEnabled;
|
||||
|
||||
private MappedFieldType latFieldType;
|
||||
private MappedFieldType lonFieldType;
|
||||
|
||||
public GeoPointFieldType() {}
|
||||
|
||||
protected GeoPointFieldType(GeoPointFieldType ref) {
|
||||
super(ref);
|
||||
this.geohashFieldType = ref.geohashFieldType; // copying ref is ok, this can never be modified
|
||||
this.geohashPrecision = ref.geohashPrecision;
|
||||
this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
|
||||
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
|
||||
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
|
||||
}
|
||||
|
||||
@Override
|
||||
public MappedFieldType clone() {
|
||||
return new GeoPointFieldType(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!super.equals(o)) return false;
|
||||
GeoPointFieldType that = (GeoPointFieldType) o;
|
||||
return geohashPrecision == that.geohashPrecision &&
|
||||
geohashPrefixEnabled == that.geohashPrefixEnabled &&
|
||||
java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
|
||||
java.util.Objects.equals(latFieldType, that.latFieldType) &&
|
||||
java.util.Objects.equals(lonFieldType, that.lonFieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
|
||||
lonFieldType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
|
||||
super.checkCompatibility(fieldType, conflicts, strict);
|
||||
GeoPointFieldType other = (GeoPointFieldType)fieldType;
|
||||
if (isLatLonEnabled() != other.isLatLonEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
|
||||
}
|
||||
if (isGeohashEnabled() != other.isGeohashEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
|
||||
}
|
||||
if (geohashPrecision() != other.geohashPrecision()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
|
||||
}
|
||||
if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
|
||||
}
|
||||
if (isLatLonEnabled() && other.isLatLonEnabled() &&
|
||||
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
|
||||
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isGeohashEnabled() {
|
||||
return geohashFieldType != null;
|
||||
}
|
||||
|
||||
public MappedFieldType geohashFieldType() {
|
||||
return geohashFieldType;
|
||||
}
|
||||
|
||||
public int geohashPrecision() {
|
||||
return geohashPrecision;
|
||||
}
|
||||
|
||||
public boolean isGeohashPrefixEnabled() {
|
||||
return geohashPrefixEnabled;
|
||||
}
|
||||
|
||||
public void setGeohashEnabled(MappedFieldType geohashFieldType, int geohashPrecision, boolean geohashPrefixEnabled) {
|
||||
checkIfFrozen();
|
||||
this.geohashFieldType = geohashFieldType;
|
||||
this.geohashPrecision = geohashPrecision;
|
||||
this.geohashPrefixEnabled = geohashPrefixEnabled;
|
||||
}
|
||||
|
||||
public boolean isLatLonEnabled() {
|
||||
return latFieldType != null;
|
||||
}
|
||||
|
||||
public MappedFieldType latFieldType() {
|
||||
return latFieldType;
|
||||
}
|
||||
|
||||
public MappedFieldType lonFieldType() {
|
||||
return lonFieldType;
|
||||
}
|
||||
|
||||
public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) {
|
||||
checkIfFrozen();
|
||||
this.latFieldType = latFieldType;
|
||||
this.lonFieldType = lonFieldType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint value(Object value) {
|
||||
if (value instanceof GeoPoint) {
|
||||
return (GeoPoint) value;
|
||||
} else {
|
||||
return GeoPoint.parseFromLatLon(value.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A byte-aligned fixed-length encoding for latitudes and longitudes.
|
||||
*/
|
||||
public static final class Encoding {
|
||||
|
||||
// With 14 bytes we already have better precision than a double since a double has 11 bits of exponent
|
||||
private static final int MAX_NUM_BYTES = 14;
|
||||
|
||||
private static final Encoding[] INSTANCES;
|
||||
static {
|
||||
INSTANCES = new Encoding[MAX_NUM_BYTES + 1];
|
||||
for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) {
|
||||
INSTANCES[numBytes] = new Encoding(numBytes);
|
||||
}
|
||||
}
|
||||
|
||||
/** Get an instance based on the number of bytes that has been used to encode values. */
|
||||
public static final Encoding of(int numBytesPerValue) {
|
||||
final Encoding instance = INSTANCES[numBytesPerValue];
|
||||
if (instance == null) {
|
||||
throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
|
||||
* expected precision:<ul>
|
||||
* <li>1km: 4 bytes</li>
|
||||
* <li>3m: 6 bytes</li>
|
||||
* <li>1m: 8 bytes</li>
|
||||
* <li>1cm: 8 bytes</li>
|
||||
* <li>1mm: 10 bytes</li></ul> */
|
||||
public static final Encoding of(DistanceUnit.Distance precision) {
|
||||
for (Encoding encoding : INSTANCES) {
|
||||
if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
|
||||
return encoding;
|
||||
}
|
||||
}
|
||||
return INSTANCES[MAX_NUM_BYTES];
|
||||
}
|
||||
|
||||
private final DistanceUnit.Distance precision;
|
||||
private final int numBytes;
|
||||
private final int numBytesPerCoordinate;
|
||||
private final double factor;
|
||||
|
||||
private Encoding(int numBytes) {
|
||||
assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES;
|
||||
assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment
|
||||
this.numBytes = numBytes;
|
||||
this.numBytesPerCoordinate = numBytes / 2;
|
||||
this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
|
||||
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
|
||||
if (numBytes == MAX_NUM_BYTES) {
|
||||
// no precision loss compared to a double
|
||||
precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT);
|
||||
} else {
|
||||
precision = new DistanceUnit.Distance(
|
||||
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long
|
||||
DistanceUnit.DEFAULT);
|
||||
}
|
||||
}
|
||||
|
||||
public DistanceUnit.Distance precision() {
|
||||
return precision;
|
||||
}
|
||||
|
||||
/** The number of bytes required to encode a single geo point. */
|
||||
public final int numBytes() {
|
||||
return numBytes;
|
||||
}
|
||||
|
||||
/** The number of bits required to encode a single coordinate of a geo point. */
|
||||
public int numBitsPerCoordinate() {
|
||||
return numBytesPerCoordinate << 3;
|
||||
}
|
||||
|
||||
/** Return the bits that encode a latitude/longitude. */
|
||||
public long encodeCoordinate(double lat) {
|
||||
return Math.round((lat + 180) / factor);
|
||||
}
|
||||
|
||||
/** Decode a sequence of bits into the original coordinate. */
|
||||
public double decodeCoordinate(long bits) {
|
||||
return bits * factor - 180;
|
||||
}
|
||||
|
||||
private void encodeBits(long bits, byte[] out, int offset) {
|
||||
for (int i = 0; i < numBytesPerCoordinate; ++i) {
|
||||
out[offset++] = (byte) bits;
|
||||
bits >>>= 8;
|
||||
}
|
||||
assert bits == 0;
|
||||
}
|
||||
|
||||
private long decodeBits(byte [] in, int offset) {
|
||||
long r = in[offset++] & 0xFFL;
|
||||
for (int i = 1; i < numBytesPerCoordinate; ++i) {
|
||||
r |= (in[offset++] & 0xFFL) << (i * 8);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
/** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */
|
||||
public void encode(double lat, double lon, byte[] out, int offset) {
|
||||
encodeBits(encodeCoordinate(lat), out, offset);
|
||||
encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate);
|
||||
}
|
||||
|
||||
/** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */
|
||||
public GeoPoint decode(byte[] in, int offset, GeoPoint out) {
|
||||
final long latBits = decodeBits(in, offset);
|
||||
final long lonBits = decodeBits(in, offset + numBytesPerCoordinate);
|
||||
return decode(latBits, lonBits, out);
|
||||
}
|
||||
|
||||
/** Decode a geo point from the bits of the encoded latitude and longitudes. */
|
||||
public GeoPoint decode(long latBits, long lonBits, GeoPoint out) {
|
||||
final double lat = decodeCoordinate(latBits);
|
||||
final double lon = decodeCoordinate(lonBits);
|
||||
return out.reset(lat, lon);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private final ContentPath.Type pathType;
|
||||
|
||||
private final DoubleFieldMapper latMapper;
|
||||
|
||||
private final DoubleFieldMapper lonMapper;
|
||||
|
||||
private final StringFieldMapper geohashMapper;
|
||||
|
||||
protected Explicit<Boolean> ignoreMalformed;
|
||||
|
||||
protected Explicit<Boolean> coerce;
|
||||
|
||||
public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
|
||||
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
|
||||
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
|
||||
this.pathType = pathType;
|
||||
this.latMapper = latMapper;
|
||||
this.lonMapper = lonMapper;
|
||||
this.geohashMapper = geohashMapper;
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
this.coerce = coerce;
|
||||
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
|
||||
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
|
||||
ignoreMalformed, copyTo);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldType fieldType() {
|
||||
return (GeoPointFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
|
||||
GeoPointFieldMapper gpfmMergeWith = (GeoPointFieldMapper) mergeWith;
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
|
||||
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
|
||||
}
|
||||
}
|
||||
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gpfmMergeWith.ignoreMalformed.explicit()) {
|
||||
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
|
||||
}
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
this.coerce = gpfmMergeWith.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mapper parse(ParseContext context) throws IOException {
|
||||
ContentPath.Type origPathType = context.path().pathType();
|
||||
context.path().pathType(pathType);
|
||||
context.path().add(simpleName());
|
||||
|
||||
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);
|
||||
|
||||
if (sparse != null) {
|
||||
parse(context, sparse, null);
|
||||
} else {
|
||||
sparse = new GeoPoint();
|
||||
XContentParser.Token token = context.parser().currentToken();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
token = context.parser().nextToken();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
// its an array of array of lon/lat [ [1.2, 1.3], [1.4, 1.5] ]
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
|
||||
token = context.parser().nextToken();
|
||||
}
|
||||
} else {
|
||||
// its an array of other possible values
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
double lon = context.parser().doubleValue();
|
||||
token = context.parser().nextToken();
|
||||
double lat = context.parser().doubleValue();
|
||||
while ((token = context.parser().nextToken()) != XContentParser.Token.END_ARRAY);
|
||||
parse(context, sparse.reset(lat, lon), null);
|
||||
} else {
|
||||
while (token != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_STRING) {
|
||||
parsePointFromString(context, sparse, context.parser().text());
|
||||
} else {
|
||||
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
|
||||
}
|
||||
token = context.parser().nextToken();
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
parsePointFromString(context, sparse, context.parser().text());
|
||||
} else if (token != XContentParser.Token.VALUE_NULL) {
|
||||
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
|
||||
}
|
||||
}
|
||||
|
||||
context.path().remove();
|
||||
context.path().pathType(origPathType);
|
||||
return null;
|
||||
}
|
||||
|
||||
private void addGeohashField(ParseContext context, String geohash) throws IOException {
|
||||
int len = Math.min(fieldType().geohashPrecision(), geohash.length());
|
||||
int min = fieldType().isGeohashPrefixEnabled() ? 1 : len;
|
||||
|
||||
for (int i = len; i >= min; i--) {
|
||||
// side effect of this call is adding the field
|
||||
geohashMapper.parse(context.createExternalValueContext(geohash.substring(0, i)));
|
||||
}
|
||||
}
|
||||
|
||||
private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException {
|
||||
if (point.indexOf(',') < 0) {
|
||||
parse(context, sparse.resetFromGeoHash(point), point);
|
||||
} else {
|
||||
parse(context, sparse.resetFromString(point), null);
|
||||
}
|
||||
}
|
||||
|
||||
private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
|
||||
boolean validPoint = false;
|
||||
if (coerce.value() == false && ignoreMalformed.value() == false) {
|
||||
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
|
||||
if (ignoreMalformed.value() == false) {
|
||||
if (point.lat() > 90.0 || point.lat() < -90.0) {
|
||||
throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
|
||||
}
|
||||
if (point.lon() > 180.0 || point.lon() < -180) {
|
||||
throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
|
||||
}
|
||||
validPoint = true;
|
||||
}
|
||||
|
||||
if (coerce.value() == true && validPoint == false) {
|
||||
// by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
|
||||
// thus this extra step can be skipped
|
||||
} else {
|
||||
// LUCENE WATCH: This will be folded back into Lucene's GeoPointField
|
||||
GeoUtils.normalizePoint(point, true, true);
|
||||
GeoUtils.normalizePoint(point);
|
||||
}
|
||||
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
|
||||
context.doc().add(field);
|
||||
context.doc().add(new GeoPointField(fieldType().names().indexName(), point.lon(), point.lat(), fieldType() ));
|
||||
}
|
||||
if (fieldType().isGeohashEnabled()) {
|
||||
if (geohash == null) {
|
||||
geohash = GeoHashUtils.stringEncode(point.lon(), point.lat());
|
||||
}
|
||||
addGeohashField(context, geohash);
|
||||
}
|
||||
if (fieldType().isLatLonEnabled()) {
|
||||
latMapper.parse(context.createExternalValueContext(point.lat()));
|
||||
lonMapper.parse(context.createExternalValueContext(point.lon()));
|
||||
}
|
||||
if (fieldType().hasDocValues()) {
|
||||
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
|
||||
if (field == null) {
|
||||
field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
|
||||
context.doc().addWithKey(fieldType().names().indexName(), field);
|
||||
} else {
|
||||
field.add(point.lat(), point.lon());
|
||||
}
|
||||
}
|
||||
multiFields.parse(this, context);
|
||||
super.parse(context, point, geoHash);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
List<Mapper> extras = new ArrayList<>();
|
||||
if (fieldType().isGeohashEnabled()) {
|
||||
extras.add(geohashMapper);
|
||||
}
|
||||
if (fieldType().isLatLonEnabled()) {
|
||||
extras.add(latMapper);
|
||||
extras.add(lonMapper);
|
||||
}
|
||||
return Iterators.concat(super.iterator(), extras.iterator());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
if (includeDefaults || pathType != Defaults.PATH_TYPE) {
|
||||
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) {
|
||||
builder.field("lat_lon", fieldType().isLatLonEnabled());
|
||||
}
|
||||
if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) {
|
||||
builder.field("geohash", fieldType().isGeohashEnabled());
|
||||
}
|
||||
if (includeDefaults || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
|
||||
builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled());
|
||||
}
|
||||
if (fieldType().isGeohashEnabled() && (includeDefaults || fieldType().geohashPrecision() != Defaults.GEO_HASH_PRECISION)) {
|
||||
builder.field("geohash_precision", fieldType().geohashPrecision());
|
||||
}
|
||||
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
|
||||
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
|
||||
}
|
||||
if (includeDefaults || coerce.explicit()) {
|
||||
builder.field(Names.COERCE, coerce.value());
|
||||
}
|
||||
if (includeDefaults || ignoreMalformed.explicit()) {
|
||||
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
|
||||
}
|
||||
}
|
||||
|
||||
public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField {
|
||||
|
||||
private final ObjectHashSet<GeoPoint> points;
|
||||
|
||||
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
|
||||
super(name);
|
||||
points = new ObjectHashSet<>(2);
|
||||
points.add(new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
public void add(double lat, double lon) {
|
||||
points.add(new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef binaryValue() {
|
||||
final byte[] bytes = new byte[points.size() * 16];
|
||||
int off = 0;
|
||||
for (Iterator<ObjectCursor<GeoPoint>> it = points.iterator(); it.hasNext(); ) {
|
||||
final GeoPoint point = it.next().value;
|
||||
ByteUtils.writeDoubleLE(point.getLat(), bytes, off);
|
||||
ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8);
|
||||
off += 16;
|
||||
}
|
||||
return new BytesRef(bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,394 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.geo;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.util.ByteUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.mapper.MergeResult;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
/**
 * Parsing: We handle:
 * <p>
 * - "field" : "geo_hash"
 * - "field" : "lat,lon"
 * - "field" : {
 *     "lat" : 1.1,
 *     "lon" : 2.1
 * }
 */
public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implements ArrayValueMapperParser {
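    // A minimal illustration (hypothetical field named "pin.location", values illustrative only)
    // of the three accepted forms listed in the javadoc above:
    //
    //   { "pin": { "location": "drm3btev3e86" } }                      // geohash string
    //   { "pin": { "location": "41.12,-71.34" } }                      // "lat,lon" string
    //   { "pin": { "location": { "lat": 41.12, "lon": -71.34 } } }     // object with lat/lon
    //
    // The GeoPointFieldMapper.parse(ParseContext) implementation shown earlier in this diff also
    // accepts [lon, lat] number arrays.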
|
||||
|
||||
    public static final String CONTENT_TYPE = "geo_point";

    public static class Names extends BaseGeoPointFieldMapper.Names {
        public static final String COERCE = "coerce";
    }

    public static class Defaults extends BaseGeoPointFieldMapper.Defaults {
        public static final Explicit<Boolean> COERCE = new Explicit(false, false);

        public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.freeze();
        }
    }
|
||||
|
||||
/**
|
||||
* Concrete builder for legacy GeoPointField
|
||||
*/
|
||||
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, GeoPointFieldMapperLegacy> {
|
||||
|
||||
private Boolean coerce;
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE);
|
||||
this.builder = this;
|
||||
}
|
||||
|
||||
public Builder coerce(boolean coerce) {
|
||||
this.coerce = coerce;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Explicit<Boolean> coerce(BuilderContext context) {
|
||||
if (coerce != null) {
|
||||
return new Explicit<>(coerce, true);
|
||||
}
|
||||
if (context.indexSettings() != null) {
|
||||
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
|
||||
}
|
||||
return Defaults.COERCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType,
|
||||
MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
|
||||
DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
|
||||
CopyTo copyTo) {
|
||||
fieldType.setTokenized(false);
|
||||
setupFieldType(context);
|
||||
fieldType.setHasDocValues(false);
|
||||
defaultFieldType.setHasDocValues(false);
|
||||
return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
|
||||
geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointFieldMapperLegacy build(BuilderContext context) {
|
||||
return super.build(context);
|
||||
}
|
||||
}
|
||||
|
||||
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
|
||||
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
Object propNode = entry.getValue();
|
||||
if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
|
||||
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (propName.equals(Names.COERCE)) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
|
||||
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
}
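    // For indices created before Version.V_2_0_0 the loop above maps the legacy options onto the
    // current ones: "validate"/"validate_lat"/"validate_lon" : X becomes ignore_malformed = !X, and
    // "normalize"/"normalize_lat"/"normalize_lon" : X becomes coerce = X. For example (illustrative
    // mapping fragment), {"validate": false, "normalize": true} parses as ignore_malformed = true
    // and coerce = true.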
|
||||
|
||||
/**
|
||||
* A byte-aligned fixed-length encoding for latitudes and longitudes.
|
||||
*/
|
||||
public static final class Encoding {
|
||||
|
||||
// With 14 bytes we already have better precision than a double since a double has 11 bits of exponent
|
||||
private static final int MAX_NUM_BYTES = 14;
|
||||
|
||||
private static final Encoding[] INSTANCES;
|
||||
static {
|
||||
INSTANCES = new Encoding[MAX_NUM_BYTES + 1];
|
||||
for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) {
|
||||
INSTANCES[numBytes] = new Encoding(numBytes);
|
||||
}
|
||||
}
|
||||
|
||||
/** Get an instance based on the number of bytes that has been used to encode values. */
|
||||
public static final Encoding of(int numBytesPerValue) {
|
||||
final Encoding instance = INSTANCES[numBytesPerValue];
|
||||
if (instance == null) {
|
||||
throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
|
||||
* expected precision:<ul>
|
||||
* <li>1km: 4 bytes</li>
|
||||
* <li>3m: 6 bytes</li>
|
||||
* <li>1m: 8 bytes</li>
|
||||
* <li>1cm: 8 bytes</li>
|
||||
* <li>1mm: 10 bytes</li></ul> */
|
||||
public static final Encoding of(DistanceUnit.Distance precision) {
|
||||
for (Encoding encoding : INSTANCES) {
|
||||
if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
|
||||
return encoding;
|
||||
}
|
||||
}
|
||||
return INSTANCES[MAX_NUM_BYTES];
|
||||
}
|
||||
|
||||
private final DistanceUnit.Distance precision;
|
||||
private final int numBytes;
|
||||
private final int numBytesPerCoordinate;
|
||||
private final double factor;
|
||||
|
||||
private Encoding(int numBytes) {
|
||||
assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES;
|
||||
assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment
|
||||
this.numBytes = numBytes;
|
||||
this.numBytesPerCoordinate = numBytes / 2;
|
||||
this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
|
||||
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
|
||||
if (numBytes == MAX_NUM_BYTES) {
|
||||
// no precision loss compared to a double
|
||||
precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT);
|
||||
} else {
|
||||
precision = new DistanceUnit.Distance(
|
||||
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long
|
||||
DistanceUnit.DEFAULT);
|
||||
}
|
||||
}
|
||||
|
||||
public DistanceUnit.Distance precision() {
|
||||
return precision;
|
||||
}
|
||||
|
||||
/** The number of bytes required to encode a single geo point. */
|
||||
public final int numBytes() {
|
||||
return numBytes;
|
||||
}
|
||||
|
||||
/** The number of bits required to encode a single coordinate of a geo point. */
|
||||
public int numBitsPerCoordinate() {
|
||||
return numBytesPerCoordinate << 3;
|
||||
}
|
||||
|
||||
/** Return the bits that encode a latitude/longitude. */
|
||||
public long encodeCoordinate(double lat) {
|
||||
return Math.round((lat + 180) / factor);
|
||||
}
|
||||
|
||||
/** Decode a sequence of bits into the original coordinate. */
|
||||
public double decodeCoordinate(long bits) {
|
||||
return bits * factor - 180;
|
||||
}
|
||||
|
||||
private void encodeBits(long bits, byte[] out, int offset) {
|
||||
for (int i = 0; i < numBytesPerCoordinate; ++i) {
|
||||
out[offset++] = (byte) bits;
|
||||
bits >>>= 8;
|
||||
}
|
||||
assert bits == 0;
|
||||
}
|
||||
|
||||
private long decodeBits(byte [] in, int offset) {
|
||||
long r = in[offset++] & 0xFFL;
|
||||
for (int i = 1; i < numBytesPerCoordinate; ++i) {
|
||||
r |= (in[offset++] & 0xFFL) << (i * 8);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
/** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */
|
||||
public void encode(double lat, double lon, byte[] out, int offset) {
|
||||
encodeBits(encodeCoordinate(lat), out, offset);
|
||||
encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate);
|
||||
}
|
||||
|
||||
/** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */
|
||||
public GeoPoint decode(byte[] in, int offset, GeoPoint out) {
|
||||
final long latBits = decodeBits(in, offset);
|
||||
final long lonBits = decodeBits(in, offset + numBytesPerCoordinate);
|
||||
return decode(latBits, lonBits, out);
|
||||
}
|
||||
|
||||
/** Decode a geo point from the bits of the encoded latitude and longitudes. */
|
||||
public GeoPoint decode(long latBits, long lonBits, GeoPoint out) {
|
||||
final double lat = decodeCoordinate(latBits);
|
||||
final double lon = decodeCoordinate(lonBits);
|
||||
return out.reset(lat, lon);
|
||||
}
|
||||
|
||||
}
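    // A minimal round-trip sketch of the Encoding contract documented above (hypothetical helper,
    // illustrative values; per the javadoc, ~1m precision requires 8 bytes per point):
    private static GeoPoint encodingRoundTripExample() {
        Encoding encoding = Encoding.of(new DistanceUnit.Distance(1, DistanceUnit.METERS));
        byte[] buffer = new byte[encoding.numBytes()];
        encoding.encode(41.12, -71.34, buffer, 0);         // encode(lat, lon, out, offset)
        return encoding.decode(buffer, 0, new GeoPoint()); // recovers the point to within ~1m
    }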
|
||||
|
||||
protected Explicit<Boolean> coerce;
|
||||
|
||||
public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
|
||||
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
|
||||
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
|
||||
Explicit<Boolean> coerce, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
|
||||
ignoreMalformed, copyTo);
|
||||
this.coerce = coerce;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
|
||||
super.merge(mergeWith, mergeResult);
|
||||
if (!this.getClass().equals(mergeWith.getClass())) {
|
||||
return;
|
||||
}
|
||||
|
||||
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
|
||||
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
|
||||
}
|
||||
}
|
||||
|
||||
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
|
||||
if (gpfmMergeWith.coerce.explicit()) {
|
||||
this.coerce = gpfmMergeWith.coerce;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    @Override
    protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
        boolean validPoint = false;
        if (coerce.value() == false && ignoreMalformed.value() == false) {
            if (point.lat() > 90.0 || point.lat() < -90.0) {
                throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
            }
            if (point.lon() > 180.0 || point.lon() < -180) {
                throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
            }
            validPoint = true;
        }

        if (coerce.value() == true && validPoint == false) {
            // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
            // thus this extra step can be skipped
            GeoUtils.normalizePoint(point, true, true);
        }

        if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
            Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
            context.doc().add(field);
        }

        super.parse(context, point, geoHash);

        if (fieldType().hasDocValues()) {
            CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
            if (field == null) {
                field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
                context.doc().addWithKey(fieldType().names().indexName(), field);
            } else {
                field.add(point.lat(), point.lon());
            }
        }
    }
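    // Summary of the branches above: with coerce == false and ignore_malformed == false an
    // out-of-range latitude/longitude throws IllegalArgumentException; with coerce == true the
    // point is normalized via GeoUtils.normalizePoint (e.g. a longitude just past 180 is wrapped
    // back into range); with ignore_malformed == true and coerce == false the value is indexed as
    // supplied, without bounds checks.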
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
if (includeDefaults || coerce.explicit()) {
|
||||
builder.field(Names.COERCE, coerce.value());
|
||||
}
|
||||
}
|
||||
|
||||
public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField {
|
||||
|
||||
private final ObjectHashSet<GeoPoint> points;
|
||||
|
||||
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
|
||||
super(name);
|
||||
points = new ObjectHashSet<>(2);
|
||||
points.add(new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
public void add(double lat, double lon) {
|
||||
points.add(new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef binaryValue() {
|
||||
final byte[] bytes = new byte[points.size() * 16];
|
||||
int off = 0;
|
||||
for (Iterator<ObjectCursor<GeoPoint>> it = points.iterator(); it.hasNext(); ) {
|
||||
final GeoPoint point = it.next().value;
|
||||
ByteUtils.writeDoubleLE(point.getLat(), bytes, off);
|
||||
ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8);
|
||||
off += 16;
|
||||
}
|
||||
return new BytesRef(bytes);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -220,8 +220,8 @@ public class ParentFieldMapper extends MetadataFieldMapper {
return super.termsQuery(values, context);
|
||||
}
|
||||
|
||||
List<String> types = new ArrayList<>(context.mapperService().types().size());
|
||||
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
|
||||
List<String> types = new ArrayList<>(context.getMapperService().types().size());
|
||||
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
|
||||
if (!documentMapper.parentFieldMapper().active()) {
|
||||
types.add(documentMapper.type());
|
||||
}
|
||||
|
|
|
@@ -355,7 +355,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
try {
|
||||
clone = (ObjectMapper) super.clone();
|
||||
} catch (CloneNotSupportedException e) {
|
||||
throw new RuntimeException();
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
|
|
|
@@ -44,7 +44,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentTypeListener;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
@@ -69,7 +68,6 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
public final String MAP_UNMAPPED_FIELDS_AS_STRING = "index.percolator.map_unmapped_fields_as_string";
|
||||
|
||||
// This is a shard level service, but these below are index level service:
|
||||
private final IndexQueryParserService queryParserService;
|
||||
private final MapperService mapperService;
|
||||
private final IndexFieldDataService indexFieldDataService;
|
||||
|
||||
|
@@ -79,18 +77,20 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
private final RealTimePercolatorOperationListener realTimePercolatorOperationListener = new RealTimePercolatorOperationListener();
|
||||
private final PercolateTypeListener percolateTypeListener = new PercolateTypeListener();
|
||||
private final AtomicBoolean realTimePercolatorEnabled = new AtomicBoolean(false);
|
||||
private final QueryShardContext queryShardContext;
|
||||
private boolean mapUnmappedFieldsAsString;
|
||||
private final MeanMetric percolateMetric = new MeanMetric();
|
||||
private final CounterMetric currentMetric = new CounterMetric();
|
||||
private final CounterMetric numberOfQueries = new CounterMetric();
|
||||
|
||||
public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, IndexQueryParserService queryParserService,
|
||||
public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings,
|
||||
ShardIndexingService indexingService, MapperService mapperService,
|
||||
QueryShardContext queryShardContext,
|
||||
IndexFieldDataService indexFieldDataService) {
|
||||
super(shardId, indexSettings);
|
||||
this.queryParserService = queryParserService;
|
||||
this.mapperService = mapperService;
|
||||
this.indexingService = indexingService;
|
||||
this.queryShardContext = queryShardContext;
|
||||
this.indexFieldDataService = indexFieldDataService;
|
||||
this.mapUnmappedFieldsAsString = this.indexSettings.getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false);
|
||||
mapperService.addTypeListener(percolateTypeListener);
|
||||
|
@@ -179,7 +179,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
if (type != null) {
|
||||
previousTypes = QueryShardContext.setTypesWithPrevious(type);
|
||||
}
|
||||
QueryShardContext context = queryParserService.getShardContext();
|
||||
QueryShardContext context = new QueryShardContext(queryShardContext);
|
||||
try {
|
||||
context.reset(parser);
|
||||
// This means that fields in the query need to exist in the mapping prior to registering this query
|
||||
|
@@ -196,7 +196,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent
// as an analyzed string.
|
||||
context.setAllowUnmappedFields(false);
|
||||
context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString);
|
||||
return queryParserService.parseInnerQuery(context);
|
||||
return context.parseInnerQuery();
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Failed to parse", e);
|
||||
} finally {
|
||||
|
|
|
@@ -245,10 +245,10 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder<CommonTermsQue
if (fieldType != null) {
|
||||
analyzerObj = context.getSearchAnalyzer(fieldType);
|
||||
} else {
|
||||
analyzerObj = context.mapperService().searchAnalyzer();
|
||||
analyzerObj = context.getMapperService().searchAnalyzer();
|
||||
}
|
||||
} else {
|
||||
analyzerObj = context.mapperService().analysisService().analyzer(analyzer);
|
||||
analyzerObj = context.getMapperService().analysisService().analyzer(analyzer);
|
||||
if (analyzerObj == null) {
|
||||
throw new QueryShardException(context, "[common] analyzer [" + analyzer + "] not found");
|
||||
}
|
||||
|
|
|
@@ -72,7 +72,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
}
|
||||
|
||||
public static Query newFilter(QueryShardContext context, String fieldPattern) {
|
||||
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.mapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
if (fieldNamesFieldType == null) {
|
||||
// can only happen when no types exist, so no docs exist either
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
|
|
|
@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.GeoPointInBBoxQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Numbers;

@@ -29,7 +30,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;

@@ -232,6 +234,14 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding

@Override
public Query doToQuery(QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}

QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception);

@@ -254,30 +264,27 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
}

MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().after(Version.CURRENT)) {
return new GeoPointInBBoxQuery(fieldType.names().fullName(), topLeft.lon(), bottomRight.lat(), bottomRight.lon(), topLeft.lat());
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);

Query result;
Query query;
switch(type) {
case INDEXED:
result = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
query = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
break;
case MEMORY:
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
result = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
query = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
break;
default:
// Someone extended the type enum w/o adjusting this switch statement.
throw new IllegalStateException("geo bounding box type [" + type + "] not supported.");
}

return result;
return query;
}

@Override

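The geo builders in this commit all follow the same cut-over pattern: keep the legacy field-data/indexed implementation for existing indices, and switch to the new Lucene GeoPoint queries once GeoPointFieldV2 lands (the norelease comments mark the placeholder version checks). Below is a minimal, hypothetical sketch of that dispatch for the bounding-box case; the method name is illustrative, and the validation plus the MEMORY/INDEXED handling shown in the hunk are elided.

-------------------------------------
// Sketch only -- topLeft/bottomRight/luceneTopLeft/luceneBottomRight are the
// builder fields used in the hunk above.
private Query toBBoxQuery(QueryShardContext context, MappedFieldType fieldType) {
    // Placeholder guard: per the norelease comment this becomes a real
    // index-created-version check (e.g. before 2.2.0) once the new field type is merged.
    if (context.indexVersionCreated().after(Version.CURRENT)) {
        // New code path: Lucene's GeoPointInBBoxQuery against the new field format.
        return new GeoPointInBBoxQuery(fieldType.names().fullName(),
                topLeft.lon(), bottomRight.lat(), bottomRight.lon(), topLeft.lat());
    }
    // Legacy code path: cast to the legacy field type and build the old indexed query.
    GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType =
            (GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType;
    return IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
}
-------------------------------------
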
@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.GeoPointDistanceQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;

@@ -31,7 +32,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;

import java.io.IOException;

@@ -203,6 +205,15 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue

@Override
protected Query doToQuery(QueryShardContext shardContext) throws IOException {
MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
}

if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}

QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception);

@@ -214,18 +225,15 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue

double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);

MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
if (shardContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);

IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
Query query = new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
return query;
normDistance = GeoUtils.maxRadialDistance(center, normDistance);
return new GeoPointDistanceQuery(fieldType.names().fullName(), center.lon(), center.lat(), normDistance);
}

@Override

@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.GeoPointDistanceRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;

@@ -31,13 +32,16 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

import static org.apache.lucene.util.GeoUtils.TOLERANCE;

public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistanceRangeQueryBuilder> {

public static final String NAME = "geo_distance_range";

@@ -208,6 +212,13 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}

final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc

@@ -235,7 +246,10 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT);
}
fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT);
} else {
fromValue = new Double(0);
}

if (to != null) {
if (to instanceof Number) {
toValue = unit.toMeters(((Number) to).doubleValue());

@@ -243,20 +257,21 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT);
}
toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT);
} else {
toValue = GeoUtils.maxRadialDistance(point);
}

MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
indexFieldData, optimizeBbox);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);

IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
indexFieldData, optimizeBbox);
return new GeoPointDistanceRangeQuery(fieldType.names().fullName(), point.lon(), point.lat(),
(includeLower) ? fromValue : fromValue + TOLERANCE,
(includeUpper) ? toValue : toValue - TOLERANCE);
}

@Override

@@ -19,6 +19,7 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.GeoPointInPolygonQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;

@@ -29,7 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;

import java.io.IOException;

@@ -99,11 +100,19 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}

List<GeoPoint> shell = new ArrayList<GeoPoint>();
for (GeoPoint geoPoint : this.shell) {
shell.add(new GeoPoint(geoPoint));
}
final int shellSize = shell.size();

final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc

@@ -127,16 +136,21 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
}
}

MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shellSize]));
}

IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shell.size()]));
double[] lats = new double[shellSize];
double[] lons = new double[shellSize];
GeoPoint p;
for (int i=0; i<shellSize; ++i) {
p = new GeoPoint(shell.get(i));
lats[i] = p.lat();
lons[i] = p.lon();
}
return new GeoPointInPolygonQuery(fieldType.names().fullName(), lons, lats);
}

@Override

@@ -36,7 +36,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;

import java.io.IOException;
import java.util.ArrayList;

@@ -75,8 +75,8 @@ public class GeohashCellQuery {
* @param geohashes optional array of additional geohashes
* @return a new GeoBoundinboxfilter
*/
public static Query create(QueryShardContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geoHashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
}

@@ -185,15 +185,15 @@ public class GeohashCellQuery {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME,
GeoPointFieldMapper.CONTENT_TYPE, fieldName);
BaseGeoPointFieldMapper.CONTENT_TYPE, fieldName);
}

if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
}

GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeohashPrefixEnabled()) {
BaseGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeoHashPrefixEnabled()) {
throw new QueryShardException(context, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME,
fieldName);
}

@@ -217,7 +217,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
}
innerQuery.setBoost(boost);

DocumentMapper childDocMapper = context.mapperService().documentMapper(type);
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
if (childDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
}

@@ -231,10 +231,10 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), childDocMapper);
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), childDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}

@@ -242,7 +242,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
}

String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[" + NAME + "] Type [" + type + "] points to a non existent parent type ["
+ parentType + "]");

@@ -130,7 +130,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
return null;
}
innerQuery.setBoost(boost);
DocumentMapper parentDocMapper = context.mapperService().documentMapper(type);
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(type);
if (parentDocMapper == null) {
throw new QueryShardException(context, "[has_parent] query configured 'parent_type' [" + type
+ "] is not a valid type");

@@ -142,10 +142,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalStateException("start object expected but was: [" + token + "]");
}
InnerHitsSubSearchContext innerHits = context.indexQueryParserService().getInnerHitsQueryParserHelper().parse(parser);
InnerHitsSubSearchContext innerHits = context.getInnerHitsContext(parser);
if (innerHits != null) {
ParsedQuery parsedQuery = new ParsedQuery(innerQuery, context.copyNamedQueries());
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.mapperService(), parentDocMapper);
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(innerHits.getSubSearchContext(), parsedQuery, null, context.getMapperService(), parentDocMapper);
String name = innerHits.getName() != null ? innerHits.getName() : type;
context.addInnerHits(name, parentChildInnerHits);
}

@@ -155,10 +155,10 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
Set<String> parentTypes = new HashSet<>(5);
parentTypes.add(parentDocMapper.type());
ParentChildIndexFieldData parentChildIndexFieldData = null;
for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) {
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = context.mapperService().documentMapper(parentFieldMapper.type());
DocumentMapper parentTypeDocumentMapper = context.getMapperService().documentMapper(parentFieldMapper.type());
parentChildIndexFieldData = context.getForField(parentFieldMapper.fieldType());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.

@@ -172,14 +172,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu

Query parentTypeQuery = null;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypes.iterator().next());
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypes.iterator().next());
if (documentMapper != null) {
parentTypeQuery = documentMapper.typeFilter();
}
} else {
BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentTypeStr);
if (documentMapper != null) {
parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}

@@ -115,7 +115,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
if (types.length == 0) {
typesForQuery = context.queryTypes();
} else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
typesForQuery = context.mapperService().types();
typesForQuery = context.getMapperService().types();
} else {
typesForQuery = new HashSet<>();
Collections.addAll(typesForQuery, types);

Some files were not shown because too many files have changed in this diff.