mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-03-27 10:28:28 +00:00
Merge branch 'master' into feature/ingest
This commit is contained in:
commit
f214271d89
@ -84,7 +84,9 @@ Please follow these formatting guidelines:
|
||||
* Line width is 140 characters
|
||||
* The rest is left to Java coding standards
|
||||
* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do.
|
||||
* Don't worry too much about imports. Try not to change the order but don't worry about fighting your IDE to stop it from switching from * imports to specific imports or from specific to * imports.
|
||||
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. Please attempt to tame your IDE so it doesn't make them and please send a PR against this document with instructions for your IDE if it doesn't contain them.
|
||||
* Eclipse: Preferences->Java->Code Style->Organize Imports. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value.
|
||||
* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so.
|
||||
|
||||
To create a distribution from the source, simply run:
|
||||
|
||||
|
34
build.gradle
34
build.gradle
@ -123,16 +123,6 @@ subprojects {
|
||||
}
|
||||
}
|
||||
}
|
||||
// For reasons we don't fully understand yet, external dependencies are not picked up by Ant's optional tasks.
|
||||
// But you can easily do it in another way.
|
||||
// Only if your buildscript and Ant's optional task need the same library would you have to define it twice.
|
||||
// https://docs.gradle.org/current/userguide/organizing_build_logic.html
|
||||
configurations {
|
||||
forbiddenApis
|
||||
}
|
||||
dependencies {
|
||||
forbiddenApis 'de.thetaphi:forbiddenapis:2.0'
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
|
||||
@ -179,6 +169,30 @@ gradle.projectsEvaluated {
|
||||
// intellij configuration
|
||||
allprojects {
|
||||
apply plugin: 'idea'
|
||||
|
||||
idea {
|
||||
module {
|
||||
// same as for the IntelliJ Gradle tooling integration
|
||||
inheritOutputDirs = false
|
||||
outputDir = file('build/classes/main')
|
||||
testOutputDir = file('build/classes/test')
|
||||
|
||||
iml {
|
||||
// fix so that Gradle idea plugin properly generates support for resource folders
|
||||
// see also https://issues.gradle.org/browse/GRADLE-2975
|
||||
withXml {
|
||||
it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/main/resources' }.each {
|
||||
it.attributes().remove('isTestSource')
|
||||
it.attributes().put('type', 'java-resource')
|
||||
}
|
||||
it.asNode().component.content.sourceFolder.findAll { it.@url == 'file://$MODULE_DIR$/src/test/resources' }.each {
|
||||
it.attributes().remove('isTestSource')
|
||||
it.attributes().put('type', 'java-test-resource')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
idea {
|
||||
|
@ -63,6 +63,7 @@ dependencies {
|
||||
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
|
||||
compile 'de.thetaphi:forbiddenapis:2.0'
|
||||
compile 'com.bmuschko:gradle-nexus-plugin:2.3.1'
|
||||
compile 'org.apache.rat:apache-rat:0.11'
|
||||
}
|
||||
|
||||
processResources {
|
||||
|
@ -2,7 +2,6 @@ package com.carrotsearch.gradle.junit4
|
||||
|
||||
import com.carrotsearch.ant.tasks.junit4.ListenersList
|
||||
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
|
||||
import com.esotericsoftware.kryo.serializers.FieldSerializer
|
||||
import groovy.xml.NamespaceBuilder
|
||||
import groovy.xml.NamespaceBuilderSupport
|
||||
import org.apache.tools.ant.BuildException
|
||||
@ -14,7 +13,10 @@ import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.file.FileTreeElement
|
||||
import org.gradle.api.internal.tasks.options.Option
|
||||
import org.gradle.api.specs.Spec
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.InputDirectory
|
||||
import org.gradle.api.tasks.Optional
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.api.tasks.util.PatternFilterable
|
||||
import org.gradle.api.tasks.util.PatternSet
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
|
@ -27,10 +27,13 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultE
|
||||
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
|
||||
import org.gradle.logging.ProgressLogger
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
import org.junit.runner.Description
|
||||
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.*
|
||||
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.FAILURE
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.IGNORED_ASSUMPTION
|
||||
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.OK
|
||||
import static java.lang.Math.max
|
||||
|
||||
/**
|
||||
|
@ -5,8 +5,21 @@ import com.carrotsearch.ant.tasks.junit4.Pluralize
|
||||
import com.carrotsearch.ant.tasks.junit4.TestsSummaryEventListener
|
||||
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.base.Strings
|
||||
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.eventbus.Subscribe
|
||||
import com.carrotsearch.ant.tasks.junit4.events.*
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.*
|
||||
import com.carrotsearch.ant.tasks.junit4.events.EventType
|
||||
import com.carrotsearch.ant.tasks.junit4.events.IEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.IStreamEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.SuiteStartedEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.TestFinishedEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedQuitEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedResultEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteStartedEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.ChildBootstrap
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.HeartBeatEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.PartialOutputEvent
|
||||
import com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus
|
||||
import com.carrotsearch.ant.tasks.junit4.events.mirrors.FailureMirror
|
||||
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
|
||||
import com.carrotsearch.ant.tasks.junit4.listeners.StackTraceFilter
|
||||
@ -15,16 +28,17 @@ import org.gradle.api.logging.LogLevel
|
||||
import org.gradle.api.logging.Logger
|
||||
import org.junit.runner.Description
|
||||
|
||||
import javax.sound.sampled.AudioSystem
|
||||
import javax.sound.sampled.Clip
|
||||
import javax.sound.sampled.Line
|
||||
import javax.sound.sampled.LineEvent
|
||||
import javax.sound.sampled.LineListener
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
import javax.sound.sampled.AudioSystem;
|
||||
import javax.sound.sampled.Clip;
|
||||
import javax.sound.sampled.Line;
|
||||
import javax.sound.sampled.LineEvent;
|
||||
import javax.sound.sampled.LineListener;
|
||||
|
||||
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.*
|
||||
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDescription
|
||||
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
|
||||
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatTime
|
||||
import static com.carrotsearch.gradle.junit4.TestLoggingConfiguration.OutputMode
|
||||
|
||||
class TestReportLogger extends TestsSummaryEventListener implements AggregatedEventListener {
|
||||
|
@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
import org.apache.tools.ant.BuildListener
|
||||
import org.apache.tools.ant.BuildLogger
|
||||
import org.apache.tools.ant.DefaultLogger
|
||||
import org.apache.tools.ant.Project
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
|
||||
import java.nio.charset.Charset
|
||||
|
||||
/**
|
||||
* A task which will run ant commands.
|
||||
*
|
||||
* Logging for the task is customizable for subclasses by overriding makeLogger.
|
||||
*/
|
||||
public abstract class AntTask extends DefaultTask {
|
||||
|
||||
/**
|
||||
* A buffer that will contain the output of the ant code run,
|
||||
* if the output was not already written directly to stdout.
|
||||
*/
|
||||
public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream()
|
||||
|
||||
@TaskAction
|
||||
final void executeTask() {
|
||||
AntBuilder ant = new AntBuilder()
|
||||
|
||||
// remove existing loggers, we add our own
|
||||
List<BuildLogger> toRemove = new ArrayList<>();
|
||||
for (BuildListener listener : ant.project.getBuildListeners()) {
|
||||
if (listener instanceof BuildLogger) {
|
||||
toRemove.add(listener);
|
||||
}
|
||||
}
|
||||
for (BuildLogger listener : toRemove) {
|
||||
ant.project.removeBuildListener(listener)
|
||||
}
|
||||
|
||||
final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO
|
||||
final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name())
|
||||
BuildLogger antLogger = makeLogger(stream, outputLevel)
|
||||
|
||||
ant.project.addBuildListener(antLogger)
|
||||
try {
|
||||
runAnt(ant)
|
||||
} catch (Exception e) {
|
||||
// ant failed, so see if we have buffered output to emit, then rethrow the failure
|
||||
String buffer = outputBuffer.toString()
|
||||
if (buffer.isEmpty() == false) {
|
||||
logger.error("=== Ant output ===\n${buffer}")
|
||||
}
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
/** Runs the doAnt closure. This can be overridden by subclasses instead of having to set a closure. */
|
||||
protected abstract void runAnt(AntBuilder ant)
|
||||
|
||||
/** Create the logger the ant runner will use, with the given stream for error/output. */
|
||||
protected BuildLogger makeLogger(PrintStream stream, int outputLevel) {
|
||||
return new DefaultLogger(
|
||||
errorPrintStream: stream,
|
||||
outputPrintStream: stream,
|
||||
messageOutputLevel: outputLevel)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the ant logger should write to stdout, or false if to the buffer.
|
||||
* The default implementation writes to the buffer when gradle info logging is disabled.
|
||||
*/
|
||||
protected boolean useStdout() {
|
||||
return logger.isInfoEnabled()
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -18,22 +18,30 @@
|
||||
*/
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
import org.gradle.process.ExecResult
|
||||
|
||||
import java.time.ZonedDateTime
|
||||
import java.time.ZoneOffset
|
||||
|
||||
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
|
||||
import org.elasticsearch.gradle.precommit.PrecommitTasks
|
||||
import org.gradle.api.*
|
||||
import org.gradle.api.artifacts.*
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.JavaVersion
|
||||
import org.gradle.api.Plugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.XmlProvider
|
||||
import org.gradle.api.artifacts.Configuration
|
||||
import org.gradle.api.artifacts.ModuleDependency
|
||||
import org.gradle.api.artifacts.ModuleVersionIdentifier
|
||||
import org.gradle.api.artifacts.ProjectDependency
|
||||
import org.gradle.api.artifacts.ResolvedArtifact
|
||||
import org.gradle.api.artifacts.dsl.RepositoryHandler
|
||||
import org.gradle.api.artifacts.maven.MavenPom
|
||||
import org.gradle.api.tasks.bundling.Jar
|
||||
import org.gradle.api.tasks.compile.JavaCompile
|
||||
import org.gradle.internal.jvm.Jvm
|
||||
import org.gradle.process.ExecResult
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
import java.time.ZoneOffset
|
||||
import java.time.ZonedDateTime
|
||||
|
||||
/**
|
||||
* Encapsulates build configuration for elasticsearch projects.
|
||||
*/
|
||||
|
@ -19,9 +19,10 @@
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.internal.nativeintegration.filesystem.Chmod
|
||||
import java.io.File
|
||||
|
||||
import javax.inject.Inject
|
||||
|
||||
/**
|
||||
|
@ -19,8 +19,9 @@
|
||||
package org.elasticsearch.gradle
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.tasks.*
|
||||
import java.io.File
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.OutputFile
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
|
||||
/**
|
||||
* Creates a file and sets it contents to something.
|
||||
|
@ -22,7 +22,7 @@ import org.elasticsearch.gradle.BuildPlugin
|
||||
import org.elasticsearch.gradle.test.RestIntegTestTask
|
||||
import org.elasticsearch.gradle.test.RunTask
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.artifacts.Dependency
|
||||
import org.gradle.api.tasks.SourceSet
|
||||
import org.gradle.api.tasks.bundling.Zip
|
||||
|
||||
@ -101,6 +101,11 @@ public class PluginBuildPlugin extends BuildPlugin {
|
||||
from pluginMetadata // metadata (eg custom security policy)
|
||||
from project.jar // this plugin's jar
|
||||
from project.configurations.runtime - project.configurations.provided // the dep jars
|
||||
// hack just for slf4j, in case it is "upgrade" from provided to compile,
|
||||
// since it is not actually provided in distributions
|
||||
from project.configurations.runtime.fileCollection { Dependency dep ->
|
||||
return dep.name == 'slf4j-api' && project.configurations.compile.dependencies.contains(dep)
|
||||
}
|
||||
// extra files for the plugin to go into the zip
|
||||
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
|
||||
from('src/main') {
|
||||
|
@ -20,7 +20,6 @@ package org.elasticsearch.gradle.plugin
|
||||
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.Optional
|
||||
|
||||
/**
|
||||
* A container for plugin properties that will be written to the plugin descriptor, for easy
|
||||
|
@ -18,7 +18,9 @@
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit
|
||||
|
||||
import org.gradle.api.*
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.tasks.Input
|
||||
import org.gradle.api.tasks.InputDirectory
|
||||
|
@ -61,6 +61,7 @@ public class ForbiddenPatternsTask extends DefaultTask {
|
||||
// add mandatory rules
|
||||
patterns.put('nocommit', /nocommit/)
|
||||
patterns.put('tab', /\t/)
|
||||
patterns.put('wildcard imports', /^\s*import.*\.\*/)
|
||||
|
||||
inputs.property("excludes", filesFilter.excludes)
|
||||
inputs.property("rules", patterns)
|
||||
|
@ -0,0 +1,122 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit
|
||||
|
||||
import org.apache.rat.anttasks.Report
|
||||
import org.apache.rat.anttasks.SubstringLicenseMatcher
|
||||
import org.apache.rat.license.SimpleLicenseFamily
|
||||
import org.elasticsearch.gradle.AntTask
|
||||
import org.gradle.api.tasks.SourceSet
|
||||
|
||||
import java.nio.file.Files
|
||||
|
||||
/**
|
||||
* Checks files for license headers.
|
||||
* <p>
|
||||
* This is a port of the apache lucene check
|
||||
*/
|
||||
public class LicenseHeadersTask extends AntTask {
|
||||
|
||||
LicenseHeadersTask() {
|
||||
description = "Checks sources for missing, incorrect, or unacceptable license headers"
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void runAnt(AntBuilder ant) {
|
||||
ant.project.addTaskDefinition('ratReport', Report)
|
||||
ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher)
|
||||
ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily)
|
||||
|
||||
// create a file for the log to go to under reports/
|
||||
File reportDir = new File(project.buildDir, "reports/licenseHeaders")
|
||||
reportDir.mkdirs()
|
||||
File reportFile = new File(reportDir, "rat.log")
|
||||
Files.deleteIfExists(reportFile.toPath())
|
||||
|
||||
// run rat, going to the file
|
||||
ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) {
|
||||
// checks all the java sources (allJava)
|
||||
for (SourceSet set : project.sourceSets) {
|
||||
for (File dir : set.allJava.srcDirs) {
|
||||
// sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main...
|
||||
if (dir.exists()) {
|
||||
ant.fileset(dir: dir)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BSD 4-clause stuff (is disallowed below)
|
||||
// we keep this here, in case someone adds BSD code for some reason, it should never be allowed.
|
||||
substringMatcher(licenseFamilyCategory: "BSD4 ",
|
||||
licenseFamilyName: "Original BSD License (with advertising clause)") {
|
||||
pattern(substring: "All advertising materials")
|
||||
}
|
||||
|
||||
// Apache
|
||||
substringMatcher(licenseFamilyCategory: "AL ",
|
||||
licenseFamilyName: "Apache") {
|
||||
// Apache license (ES)
|
||||
pattern(substring: "Licensed to Elasticsearch under one or more contributor")
|
||||
// Apache license (ASF)
|
||||
pattern(substring: "Licensed to the Apache Software Foundation (ASF) under")
|
||||
// this is the old-school one under some files
|
||||
pattern(substring: "Licensed under the Apache License, Version 2.0 (the \"License\")")
|
||||
}
|
||||
|
||||
// Generated resources
|
||||
substringMatcher(licenseFamilyCategory: "GEN ",
|
||||
licenseFamilyName: "Generated") {
|
||||
// parsers generated by antlr
|
||||
pattern(substring: "ANTLR GENERATED CODE")
|
||||
}
|
||||
|
||||
// approved categories
|
||||
approvedLicense(familyName: "Apache")
|
||||
approvedLicense(familyName: "Generated")
|
||||
}
|
||||
|
||||
// check the license file for any errors, this should be fast.
|
||||
boolean zeroUnknownLicenses = false
|
||||
boolean foundProblemsWithFiles = false
|
||||
reportFile.eachLine('UTF-8') { line ->
|
||||
if (line.startsWith("0 Unknown Licenses")) {
|
||||
zeroUnknownLicenses = true
|
||||
}
|
||||
|
||||
if (line.startsWith(" !")) {
|
||||
foundProblemsWithFiles = true
|
||||
}
|
||||
}
|
||||
|
||||
if (zeroUnknownLicenses == false || foundProblemsWithFiles) {
|
||||
// print the unapproved license section, usually its all you need to fix problems.
|
||||
int sectionNumber = 0
|
||||
reportFile.eachLine('UTF-8') { line ->
|
||||
if (line.startsWith("*******************************")) {
|
||||
sectionNumber++
|
||||
} else {
|
||||
if (sectionNumber == 2) {
|
||||
logger.error(line)
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new IllegalStateException("License header problems were found! Full details: " + reportFile.absolutePath)
|
||||
}
|
||||
}
|
||||
}
|
@ -34,6 +34,7 @@ class PrecommitTasks {
|
||||
List<Task> precommitTasks = [
|
||||
configureForbiddenApis(project),
|
||||
project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
|
||||
project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
|
||||
project.tasks.create('jarHell', JarHellTask.class),
|
||||
project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)]
|
||||
|
||||
|
@ -18,24 +18,23 @@
|
||||
*/
|
||||
package org.elasticsearch.gradle.precommit
|
||||
|
||||
import java.nio.file.Files
|
||||
import org.apache.tools.ant.BuildLogger
|
||||
import org.apache.tools.ant.DefaultLogger
|
||||
import org.apache.tools.ant.Project
|
||||
import org.elasticsearch.gradle.AntTask
|
||||
import org.gradle.api.artifacts.Configuration
|
||||
import org.gradle.api.file.FileCollection
|
||||
|
||||
import java.nio.file.FileVisitResult
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.nio.file.SimpleFileVisitor
|
||||
import java.nio.file.attribute.BasicFileAttributes
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.artifacts.UnknownConfigurationException
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
|
||||
import org.apache.tools.ant.BuildLogger
|
||||
import org.apache.tools.ant.Project
|
||||
|
||||
/**
|
||||
* Basic static checking to keep tabs on third party JARs
|
||||
*/
|
||||
public class ThirdPartyAuditTask extends DefaultTask {
|
||||
public class ThirdPartyAuditTask extends AntTask {
|
||||
|
||||
// true to be lenient about MISSING CLASSES
|
||||
private boolean missingClasses;
|
||||
@ -84,38 +83,37 @@ public class ThirdPartyAuditTask extends DefaultTask {
|
||||
return excludes;
|
||||
}
|
||||
|
||||
@TaskAction
|
||||
public void check() {
|
||||
AntBuilder ant = new AntBuilder()
|
||||
@Override
|
||||
protected BuildLogger makeLogger(PrintStream stream, int outputLevel) {
|
||||
return new DefaultLogger(
|
||||
errorPrintStream: stream,
|
||||
outputPrintStream: stream,
|
||||
// ignore passed in outputLevel for now, until we are filtering warning messages
|
||||
messageOutputLevel: Project.MSG_ERR)
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void runAnt(AntBuilder ant) {
|
||||
ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask)
|
||||
|
||||
// we are noisy for many reasons, working around performance problems with forbidden-apis, dealing
|
||||
// with warnings about missing classes, etc. so we use our own "quiet" AntBuilder
|
||||
ant.project.buildListeners.each { listener ->
|
||||
if (listener instanceof BuildLogger) {
|
||||
listener.messageOutputLevel = Project.MSG_ERR;
|
||||
}
|
||||
};
|
||||
|
||||
// we only want third party dependencies.
|
||||
FileCollection jars = project.configurations.testCompile.fileCollection({ dependency ->
|
||||
FileCollection jars = project.configurations.testCompile.fileCollection({ dependency ->
|
||||
dependency.group.startsWith("org.elasticsearch") == false
|
||||
})
|
||||
|
||||
|
||||
// we don't want provided dependencies, which we have already scanned. e.g. don't
|
||||
// scan ES core's dependencies for every single plugin
|
||||
try {
|
||||
jars -= project.configurations.getByName("provided")
|
||||
} catch (UnknownConfigurationException ignored) {}
|
||||
|
||||
Configuration provided = project.configurations.findByName('provided')
|
||||
if (provided != null) {
|
||||
jars -= provided
|
||||
}
|
||||
|
||||
// no dependencies matched, we are done
|
||||
if (jars.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
ant.taskdef(name: "thirdPartyAudit",
|
||||
classname: "de.thetaphi.forbiddenapis.ant.AntTask",
|
||||
classpath: project.configurations.forbiddenApis.asPath)
|
||||
|
||||
|
||||
|
||||
// print which jars we are going to scan, always
|
||||
// this is not the time to try to be succinct! Forbidden will print plenty on its own!
|
||||
Set<String> names = new HashSet<>()
|
||||
@ -123,26 +121,26 @@ public class ThirdPartyAuditTask extends DefaultTask {
|
||||
names.add(jar.getName())
|
||||
}
|
||||
logger.error("[thirdPartyAudit] Scanning: " + names)
|
||||
|
||||
|
||||
// warn that classes are missing
|
||||
// TODO: move these to excludes list!
|
||||
if (missingClasses) {
|
||||
logger.warn("[thirdPartyAudit] WARNING: CLASSES ARE MISSING! Expect NoClassDefFoundError in bug reports from users!")
|
||||
}
|
||||
|
||||
// TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first,
|
||||
|
||||
// TODO: forbidden-apis + zipfileset gives O(n^2) behavior unless we dump to a tmpdir first,
|
||||
// and then remove our temp dir afterwards. don't complain: try it yourself.
|
||||
// we don't use gradle temp dir handling, just google it, or try it yourself.
|
||||
|
||||
|
||||
File tmpDir = new File(project.buildDir, 'tmp/thirdPartyAudit')
|
||||
|
||||
|
||||
// clean up any previous mess (if we failed), then unzip everything to one directory
|
||||
ant.delete(dir: tmpDir.getAbsolutePath())
|
||||
tmpDir.mkdirs()
|
||||
for (File jar : jars) {
|
||||
ant.unzip(src: jar.getAbsolutePath(), dest: tmpDir.getAbsolutePath())
|
||||
}
|
||||
|
||||
|
||||
// convert exclusion class names to binary file names
|
||||
String[] excludedFiles = new String[excludes.length];
|
||||
for (int i = 0; i < excludes.length; i++) {
|
||||
@ -152,12 +150,12 @@ public class ThirdPartyAuditTask extends DefaultTask {
|
||||
throw new IllegalStateException("bogus thirdPartyAudit exclusion: '" + excludes[i] + "', not found in any dependency")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// jarHellReprise
|
||||
checkSheistyClasses(tmpDir.toPath(), new HashSet<>(Arrays.asList(excludedFiles)));
|
||||
|
||||
ant.thirdPartyAudit(internalRuntimeForbidden: true,
|
||||
failOnUnsupportedJava: false,
|
||||
|
||||
ant.thirdPartyAudit(internalRuntimeForbidden: true,
|
||||
failOnUnsupportedJava: false,
|
||||
failOnMissingClasses: !missingClasses,
|
||||
classpath: project.configurations.testCompile.asPath) {
|
||||
fileset(dir: tmpDir, excludes: excludedFiles.join(','))
|
||||
|
@ -23,11 +23,18 @@ import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.elasticsearch.gradle.LoggedExec
|
||||
import org.elasticsearch.gradle.VersionProperties
|
||||
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
|
||||
import org.gradle.api.*
|
||||
import org.gradle.api.AntBuilder
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.InvalidUserDataException
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.artifacts.Configuration
|
||||
import org.gradle.api.file.FileCollection
|
||||
import org.gradle.api.logging.Logger
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.api.tasks.Copy
|
||||
import org.gradle.api.tasks.Delete
|
||||
import org.gradle.api.tasks.Exec
|
||||
|
||||
import java.nio.file.Paths
|
||||
|
||||
|
@ -0,0 +1,287 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.gradle.test
|
||||
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.elasticsearch.gradle.AntTask
|
||||
import org.elasticsearch.gradle.LoggedExec
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.tasks.Exec
|
||||
import org.gradle.api.tasks.Input
|
||||
|
||||
/**
|
||||
* A fixture for integration tests which runs in a separate process.
|
||||
*/
|
||||
public class Fixture extends AntTask {

    /** The path to the executable that starts the fixture. */
    @Input
    String executable

    /** Arguments for the fixture process; toString() is called on each at execution time. */
    private final List<Object> arguments = new ArrayList<>()

    /** Adds arguments to pass to the fixture executable. */
    @Input
    public void args(Object... args) {
        arguments.addAll(args)
    }

    /**
     * Environment variables for the fixture process. The value can be any object, which
     * will have toString() called at execution time.
     */
    private final Map<String, Object> environment = new HashMap<>()

    /** Adds an environment variable for the fixture process. */
    @Input
    public void env(String key, Object value) {
        environment.put(key, value)
    }

    /** A flag to indicate whether the command should be executed from a shell. */
    @Input
    boolean useShell = false

    /**
     * A flag to indicate whether the fixture should be run in the foreground, or spawned.
     * It is protected so subclasses can override (eg RunTask).
     */
    protected boolean spawn = true

    /**
     * A closure to call before the fixture is considered ready. The closure is passed the fixture object,
     * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
     * condition is for http on the http port.
     */
    @Input
    Closure waitCondition = { Fixture fixture, AntBuilder ant ->
        File tmpFile = new File(fixture.cwd, 'wait.success')
        ant.get(src: "http://${fixture.addressAndPort}",
                dest: tmpFile.toString(),
                ignoreerrors: true, // do not fail on error, so logging information can be flushed
                retries: 10)
        return tmpFile.exists()
    }

    /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */
    public final Task stopTask

    public Fixture() {
        stopTask = createStopTask()
        finalizedBy(stopTask)
    }

    @Override
    protected void runAnt(AntBuilder ant) {
        project.delete(baseDir) // reset everything
        cwd.mkdirs()
        final String realExecutable
        final List<Object> realArgs = new ArrayList<>()
        final Map<String, Object> realEnv = environment
        // We need to choose which executable we are using. In shell mode, or when we
        // are spawning and thus using the wrapper script, the executable is the shell.
        if (useShell || spawn) {
            if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                realExecutable = 'cmd'
                realArgs.add('/C')
                realArgs.add('"') // quote the entire command
            } else {
                realExecutable = 'sh'
            }
        } else {
            realExecutable = executable
            realArgs.addAll(arguments)
        }
        if (spawn) {
            writeWrapperScript(executable)
            realArgs.add(wrapperScript)
            realArgs.addAll(arguments)
        }
        if (Os.isFamily(Os.FAMILY_WINDOWS) && (useShell || spawn)) {
            realArgs.add('"') // close the quote around the entire command
        }
        commandString.eachLine { line -> logger.info(line) }

        ant.exec(executable: realExecutable, spawn: spawn, dir: cwd, taskname: name) {
            realEnv.each { key, value -> env(key: key, value: value) }
            realArgs.each { arg(value: it) }
        }

        String failedProp = "failed${name}"
        // first wait for resources, or the failure marker from the wrapper script
        ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) {
            or {
                resourceexists {
                    file(file: failureMarker.toString())
                }
                and {
                    resourceexists {
                        file(file: pidFile.toString())
                    }
                    resourceexists {
                        file(file: portsFile.toString())
                    }
                }
            }
        }

        if (ant.project.getProperty(failedProp) || failureMarker.exists()) {
            fail("Failed to start ${name}")
        }

        // the process is started (has a pid) and is bound to a network interface
        // so now wait until the waitCondition has been met
        // TODO: change this to a loop?
        boolean success
        try {
            // BUGFIX: the result was previously compared with "== false", which inverted
            // the condition: a successful wait (condition returned true) failed the build
            // and a timed-out wait passed. Assign the condition result directly.
            success = waitCondition(this, ant)
        } catch (Exception e) {
            String msg = "Wait condition caught exception for ${name}"
            logger.error(msg, e)
            fail(msg, e)
        }
        if (success == false) {
            fail("Wait condition failed for ${name}")
        }
    }

    /** Returns a debug string used to log information about how the fixture was run. */
    protected String getCommandString() {
        String commandString = "\n${name} configuration:\n"
        commandString += "-----------------------------------------\n"
        commandString += "  cwd: ${cwd}\n"
        commandString += "  command: ${executable} ${arguments.join(' ')}\n"
        commandString += '  environment:\n'
        environment.each { k, v -> commandString += "    ${k}: ${v}\n" }
        if (spawn) {
            commandString += "\n  [${wrapperScript.name}]\n"
            wrapperScript.eachLine('UTF-8', { line -> commandString += "    ${line}\n"})
        }
        return commandString
    }

    /**
     * Writes a script to run the real executable, so that stdout/stderr can be captured.
     * TODO: this could be removed if we do use our own ProcessBuilder and pump output from the process
     */
    private void writeWrapperScript(String executable) {
        wrapperScript.parentFile.mkdirs()
        String argsPasser = '"$@"'
        String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi"
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            argsPasser = '%*'
            exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
        }
        wrapperScript.setText("\"${executable}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
    }

    /** Fail the build with the given message, and logging relevant info*/
    private void fail(String msg, Exception... suppressed) {
        if (logger.isInfoEnabled() == false) {
            // We already log the command at info level. No need to do it twice.
            commandString.eachLine { line -> logger.error(line) }
        }
        logger.error("${name} output:")
        logger.error("-----------------------------------------")
        logger.error("  failure marker exists: ${failureMarker.exists()}")
        logger.error("  pid file exists: ${pidFile.exists()}")
        logger.error("  ports file exists: ${portsFile.exists()}")
        // also dump the log file for the startup script (which will include ES logging output to stdout)
        if (runLog.exists()) {
            logger.error("\n  [log]")
            runLog.eachLine { line -> logger.error("    ${line}") }
        }
        logger.error("-----------------------------------------")
        GradleException toThrow = new GradleException(msg)
        for (Exception e : suppressed) {
            toThrow.addSuppressed(e)
        }
        throw toThrow
    }

    /** Adds a task to kill an elasticsearch node with the given pidfile */
    private Task createStopTask() {
        final Fixture fixture = this
        final Object pid = "${ -> fixture.pid }"
        Exec stop = project.tasks.create(name: "${name}#stop", type: LoggedExec)
        stop.onlyIf { fixture.pidFile.exists() }
        stop.doFirst {
            logger.info("Shutting down ${fixture.name} with pid ${pid}")
        }
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            stop.executable = 'Taskkill'
            stop.args('/PID', pid, '/F')
        } else {
            stop.executable = 'kill'
            stop.args('-9', pid)
        }
        stop.doLast {
            project.delete(fixture.pidFile)
        }
        return stop
    }

    /**
     * A path relative to the build dir that all configuration and runtime files
     * will live in for this fixture
     */
    protected File getBaseDir() {
        return new File(project.buildDir, "fixtures/${name}")
    }

    /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */
    protected File getCwd() {
        return new File(baseDir, 'cwd')
    }

    /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */
    protected File getPidFile() {
        return new File(baseDir, 'pid')
    }

    /** Reads the pid file and returns the process' pid */
    public int getPid() {
        return Integer.parseInt(pidFile.getText('UTF-8').trim())
    }

    /** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */
    protected File getPortsFile() {
        return new File(baseDir, 'ports')
    }

    /** Returns an address and port suitable for a uri to connect to this node over http */
    public String getAddressAndPort() {
        return portsFile.readLines("UTF-8").get(0)
    }

    /** Returns a file that wraps around the actual command when {@code spawn == true}. */
    protected File getWrapperScript() {
        return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run')
    }

    /** Returns a file that the wrapper script writes when the command failed. */
    protected File getFailureMarker() {
        return new File(cwd, 'run.failed')
    }

    /** Returns the log file the wrapper script redirects the process' stdout/stderr into. */
    protected File getRunLog() {
        return new File(cwd, 'run.log')
    }
}
|
@ -20,7 +20,7 @@ package org.elasticsearch.gradle.test
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
||||
import org.elasticsearch.gradle.BuildPlugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.GradleException
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.internal.tasks.options.Option
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
@ -82,4 +82,25 @@ public class RestIntegTestTask extends RandomizedTestingTask {
|
||||
/** Returns the cluster configuration ({@code clusterConfig}) held by this task. */
public ClusterConfiguration getCluster() {
    return clusterConfig
}
|
||||
|
||||
@Override
public Task dependsOn(Object... dependencies) {
    // Delegate the actual dependency wiring to the superclass first.
    super.dependsOn(dependencies)
    // Additionally, any dependency that is a Fixture gets its stop task
    // registered as a finalizer, so the fixture process is shut down after
    // this task has run.
    for (Object dependency : dependencies) {
        if (dependency instanceof Fixture) {
            finalizedBy(((Fixture)dependency).stopTask)
        }
    }
    return this
}
|
||||
|
||||
@Override
public void setDependsOn(Iterable<?> dependencies) {
    // Delegate the actual dependency replacement to the superclass first.
    super.setDependsOn(dependencies)
    // Mirror of dependsOn(Object...): each Fixture dependency also contributes
    // its stop task as a finalizer so the fixture is shut down after this task.
    for (Object dependency : dependencies) {
        if (dependency instanceof Fixture) {
            finalizedBy(((Fixture)dependency).stopTask)
        }
    }
}
|
||||
}
|
||||
|
@ -1,7 +1,6 @@
|
||||
package org.elasticsearch.gradle.test
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.Task
|
||||
import org.gradle.api.internal.tasks.options.Option
|
||||
import org.gradle.util.ConfigureUtil
|
||||
|
@ -27,7 +27,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks
|
||||
import org.gradle.api.Plugin
|
||||
import org.gradle.api.Project
|
||||
import org.gradle.api.plugins.JavaBasePlugin
|
||||
import org.gradle.plugins.ide.eclipse.model.EclipseClasspath
|
||||
|
||||
/** Configures the build to have a rest integration test. */
|
||||
public class StandaloneTestBasePlugin implements Plugin<Project> {
|
||||
|
@ -19,8 +19,7 @@
|
||||
package org.elasticsearch.gradle.vagrant
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.logging.ProgressLogger
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
import org.gradle.process.internal.ExecAction
|
||||
import org.gradle.process.internal.ExecActionFactory
|
||||
|
@ -19,8 +19,7 @@
|
||||
package org.elasticsearch.gradle.vagrant
|
||||
|
||||
import org.gradle.api.DefaultTask
|
||||
import org.gradle.api.tasks.*
|
||||
import org.gradle.logging.ProgressLogger
|
||||
import org.gradle.api.tasks.TaskAction
|
||||
import org.gradle.logging.ProgressLoggerFactory
|
||||
import org.gradle.process.internal.ExecAction
|
||||
import org.gradle.process.internal.ExecActionFactory
|
||||
|
@ -1,5 +1,5 @@
|
||||
elasticsearch = 3.0.0-SNAPSHOT
|
||||
lucene = 5.5.0-snapshot-1719088
|
||||
lucene = 5.5.0-snapshot-1721183
|
||||
|
||||
# optional dependencies
|
||||
spatial4j = 0.5
|
||||
|
@ -17,9 +17,9 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
|
||||
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
|
||||
import org.elasticsearch.gradle.BuildPlugin
|
||||
import org.elasticsearch.gradle.test.RestSpecHack
|
||||
|
||||
apply plugin: 'elasticsearch.build'
|
||||
apply plugin: 'com.bmuschko.nexus'
|
||||
|
@ -18,9 +18,19 @@
|
||||
*/
|
||||
package org.apache.lucene.queries;
|
||||
|
||||
import org.apache.lucene.index.*;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexReaderContext;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermContext;
|
||||
import org.apache.lucene.index.TermState;
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.BoostQuery;
|
||||
import org.apache.lucene.search.DisjunctionMaxQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.ArrayUtil;
|
||||
import org.apache.lucene.util.InPlaceMergeSorter;
|
||||
import org.apache.lucene.util.ToStringUtils;
|
||||
|
@ -23,7 +23,14 @@ import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BoostQuery;
|
||||
import org.apache.lucene.search.DisjunctionMaxQuery;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.MultiPhraseQuery;
|
||||
import org.apache.lucene.search.PhraseQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.automaton.RegExp;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
@ -35,7 +42,12 @@ import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.support.QueryParsers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static java.util.Collections.unmodifiableMap;
|
||||
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
|
||||
|
@ -22,7 +22,11 @@ package org.apache.lucene.search.vectorhighlight;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.queries.BlendedTermQuery;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.MultiPhraseQuery;
|
||||
import org.apache.lucene.search.PhraseQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
|
||||
@ -72,10 +76,10 @@ public class CustomFieldQuery extends FieldQuery {
|
||||
super.flatten(sourceQuery, reader, flatQueries, boost);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List<Term[]> terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException {
|
||||
if (currentPos == 0) {
|
||||
// if we have more than 16 terms
|
||||
// if we have more than 16 terms
|
||||
int numTerms = 0;
|
||||
for (Term[] currentPosTerm : terms) {
|
||||
numTerms += currentPosTerm.length;
|
||||
@ -83,7 +87,7 @@ public class CustomFieldQuery extends FieldQuery {
|
||||
if (numTerms > 16) {
|
||||
for (Term[] currentPosTerm : terms) {
|
||||
for (Term term : currentPosTerm) {
|
||||
super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
|
||||
super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
|
||||
}
|
||||
}
|
||||
return;
|
||||
|
@ -30,7 +30,13 @@ import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
|
@ -279,6 +279,8 @@ public class Version {
|
||||
public static final Version V_2_1_2 = new Version(V_2_1_2_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_1);
|
||||
public static final int V_2_2_0_ID = 2020099;
|
||||
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
|
||||
public static final int V_2_3_0_ID = 2030099;
|
||||
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
|
||||
public static final int V_3_0_0_ID = 3000099;
|
||||
public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_5_0);
|
||||
public static final Version CURRENT = V_3_0_0;
|
||||
@ -295,6 +297,8 @@ public class Version {
|
||||
switch (id) {
|
||||
case V_3_0_0_ID:
|
||||
return V_3_0_0;
|
||||
case V_2_3_0_ID:
|
||||
return V_2_3_0;
|
||||
case V_2_2_0_ID:
|
||||
return V_2_2_0;
|
||||
case V_2_1_2_ID:
|
||||
|
@ -19,8 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.util.concurrent.Future;
|
||||
|
@ -58,6 +58,8 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction;
|
||||
import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction;
|
||||
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
|
||||
import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction;
|
||||
@ -79,7 +81,9 @@ import org.elasticsearch.action.admin.indices.exists.indices.TransportIndicesExi
|
||||
import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsAction;
|
||||
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.FlushAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.TransportFlushAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
|
||||
import org.elasticsearch.action.admin.indices.forcemerge.TransportForceMergeAction;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
|
||||
@ -107,8 +111,6 @@ import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction;
|
||||
import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
|
||||
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction;
|
||||
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction;
|
||||
import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction;
|
||||
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction;
|
||||
@ -123,8 +125,6 @@ import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
|
||||
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction;
|
||||
import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction;
|
||||
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
|
||||
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
|
||||
import org.elasticsearch.action.admin.cluster.validate.template.TransportRenderSearchTemplateAction;
|
||||
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction;
|
||||
import org.elasticsearch.action.admin.indices.warmer.delete.TransportDeleteWarmerAction;
|
||||
import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersAction;
|
||||
|
@ -21,7 +21,6 @@ package org.elasticsearch.action;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.StatusToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
@ -62,4 +62,12 @@ public interface DocumentRequest<T> extends IndicesRequest {
|
||||
* @return the Routing
|
||||
*/
|
||||
String routing();
|
||||
|
||||
|
||||
/**
|
||||
* Get the parent for this request
|
||||
* @return the Parent
|
||||
*/
|
||||
String parent();
|
||||
|
||||
}
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -23,7 +23,8 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.transport.*;
|
||||
import org.elasticsearch.transport.TransportRequestOptions;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
/**
|
||||
* A generic proxy that will execute the given action against a specific node.
|
||||
|
@ -21,7 +21,6 @@ package org.elasticsearch.action;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.HppcMaps;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
@ -52,4 +51,4 @@ public class UnavailableShardsException extends ElasticsearchException {
|
||||
public RestStatus status() {
|
||||
return RestStatus.SERVICE_UNAVAILABLE;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.cluster.health;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.health.ClusterStateHealth;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.health.ClusterIndexHealth;
|
||||
import org.elasticsearch.cluster.health.ClusterStateHealth;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
@ -23,7 +23,11 @@ import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
|
||||
import org.elasticsearch.cluster.*;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterStateObserver;
|
||||
import org.elasticsearch.cluster.ClusterStateUpdateTask;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
|
@ -19,9 +19,7 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.hotthreads;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder;
|
||||
import org.elasticsearch.client.ClusterAdminClient;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.admin.cluster.node.info;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
|
@ -23,7 +23,9 @@ import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.*;
|
||||
import org.elasticsearch.transport.TransportChannel;
|
||||
import org.elasticsearch.transport.TransportRequestHandler;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
public final class TransportLivenessAction implements TransportRequestHandler<LivenessRequest> {
|
||||
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.admin.cluster.node.stats;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
@ -340,4 +339,4 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -24,11 +24,12 @@ import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.*;
|
||||
import org.elasticsearch.repositories.VerificationFailure;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Unregister repository response
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.reroute;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
|
||||
|
@ -0,0 +1,127 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.settings;
|
||||
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlocks;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.cluster.ClusterState.builder;
|
||||
|
||||
/**
|
||||
* Updates transient and persistent cluster state settings if there are any changes
|
||||
* due to the update.
|
||||
*/
|
||||
final class SettingsUpdater {
|
||||
final Settings.Builder transientUpdates = Settings.settingsBuilder();
|
||||
final Settings.Builder persistentUpdates = Settings.settingsBuilder();
|
||||
private final ClusterSettings clusterSettings;
|
||||
|
||||
SettingsUpdater(ClusterSettings clusterSettings) {
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
synchronized Settings getTransientUpdates() {
|
||||
return transientUpdates.build();
|
||||
}
|
||||
|
||||
synchronized Settings getPersistentUpdate() {
|
||||
return persistentUpdates.build();
|
||||
}
|
||||
|
||||
synchronized ClusterState updateSettings(final ClusterState currentState, Settings transientToApply, Settings persistentToApply) {
|
||||
boolean changed = false;
|
||||
Settings.Builder transientSettings = Settings.settingsBuilder();
|
||||
transientSettings.put(currentState.metaData().transientSettings());
|
||||
changed |= apply(transientToApply, transientSettings, transientUpdates, "transient");
|
||||
|
||||
Settings.Builder persistentSettings = Settings.settingsBuilder();
|
||||
persistentSettings.put(currentState.metaData().persistentSettings());
|
||||
changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent");
|
||||
|
||||
if (!changed) {
|
||||
return currentState;
|
||||
}
|
||||
|
||||
MetaData.Builder metaData = MetaData.builder(currentState.metaData())
|
||||
.persistentSettings(persistentSettings.build())
|
||||
.transientSettings(transientSettings.build());
|
||||
|
||||
ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
|
||||
boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) || MetaData.SETTING_READ_ONLY_SETTING.get(metaData.transientSettings());
|
||||
if (updatedReadOnly) {
|
||||
blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
|
||||
} else {
|
||||
blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
|
||||
}
|
||||
ClusterState build = builder(currentState).metaData(metaData).blocks(blocks).build();
|
||||
Settings settings = build.metaData().settings();
|
||||
// now we try to apply things and if they are invalid we fail
|
||||
// this dryRun will validate & parse settings but won't actually apply them.
|
||||
clusterSettings.dryRun(settings);
|
||||
return build;
|
||||
}
|
||||
|
||||
private boolean apply(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
|
||||
boolean changed = false;
|
||||
final Set<String> toRemove = new HashSet<>();
|
||||
Settings.Builder settingsBuilder = Settings.settingsBuilder();
|
||||
for (Map.Entry<String, String> entry : toApply.getAsMap().entrySet()) {
|
||||
if (entry.getValue() == null) {
|
||||
toRemove.add(entry.getKey());
|
||||
} else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) {
|
||||
settingsBuilder.put(entry.getKey(), entry.getValue());
|
||||
updates.put(entry.getKey(), entry.getValue());
|
||||
changed = true;
|
||||
} else {
|
||||
throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable");
|
||||
}
|
||||
|
||||
}
|
||||
changed |= applyDeletes(toRemove, target);
|
||||
target.put(settingsBuilder.build());
|
||||
return changed;
|
||||
}
|
||||
|
||||
private final boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
|
||||
boolean changed = false;
|
||||
for (String entry : deletes) {
|
||||
Set<String> keysToRemove = new HashSet<>();
|
||||
Set<String> keySet = builder.internalMap().keySet();
|
||||
for (String key : keySet) {
|
||||
if (Regex.simpleMatch(entry, key)) {
|
||||
keysToRemove.add(key);
|
||||
}
|
||||
}
|
||||
for (String key : keysToRemove) {
|
||||
builder.remove(key);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
return changed;
|
||||
}
|
||||
}
|
@ -28,25 +28,19 @@ import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.block.ClusterBlocks;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.allocation.AllocationService;
|
||||
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
|
||||
import org.elasticsearch.cluster.settings.ClusterDynamicSettings;
|
||||
import org.elasticsearch.cluster.settings.DynamicSettings;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.cluster.ClusterState.builder;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
@ -54,15 +48,14 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
|
||||
private final AllocationService allocationService;
|
||||
|
||||
private final DynamicSettings dynamicSettings;
|
||||
private final ClusterSettings clusterSettings;
|
||||
|
||||
@Inject
|
||||
public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
|
||||
AllocationService allocationService, @ClusterDynamicSettings DynamicSettings dynamicSettings,
|
||||
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
|
||||
AllocationService allocationService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterSettings clusterSettings) {
|
||||
super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, ClusterUpdateSettingsRequest::new);
|
||||
this.allocationService = allocationService;
|
||||
this.dynamicSettings = dynamicSettings;
|
||||
this.clusterSettings = clusterSettings;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -73,8 +66,8 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
@Override
|
||||
protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) {
|
||||
// allow for dedicated changes to the metadata blocks, so we don't block those to allow to "re-enable" it
|
||||
if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && request.persistentSettings().get(MetaData.SETTING_READ_ONLY) != null) ||
|
||||
request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && request.transientSettings().get(MetaData.SETTING_READ_ONLY) != null) {
|
||||
if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.persistentSettings())) ||
|
||||
request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && MetaData.SETTING_READ_ONLY_SETTING.exists(request.transientSettings())) {
|
||||
return null;
|
||||
}
|
||||
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
|
||||
@ -88,9 +81,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
|
||||
@Override
|
||||
protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener<ClusterUpdateSettingsResponse> listener) {
|
||||
final Settings.Builder transientUpdates = Settings.settingsBuilder();
|
||||
final Settings.Builder persistentUpdates = Settings.settingsBuilder();
|
||||
|
||||
final SettingsUpdater updater = new SettingsUpdater(clusterSettings);
|
||||
clusterService.submitStateUpdateTask("cluster_update_settings",
|
||||
new AckedClusterStateUpdateTask<ClusterUpdateSettingsResponse>(Priority.IMMEDIATE, request, listener) {
|
||||
|
||||
@ -98,7 +89,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
|
||||
@Override
|
||||
protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
|
||||
return new ClusterUpdateSettingsResponse(acknowledged, transientUpdates.build(), persistentUpdates.build());
|
||||
return new ClusterUpdateSettingsResponse(acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate());
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -125,7 +116,7 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
// so we should *not* execute the reroute.
|
||||
if (!clusterService.state().nodes().localNodeMaster()) {
|
||||
logger.debug("Skipping reroute after cluster update settings, because node is no longer master");
|
||||
listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
|
||||
listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate()));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -145,13 +136,13 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
@Override
|
||||
//we return when the cluster reroute is acked or it times out but the acknowledged flag depends on whether the update settings was acknowledged
|
||||
protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
|
||||
return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, transientUpdates.build(), persistentUpdates.build());
|
||||
return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, updater.getTransientUpdates(), updater.getPersistentUpdate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNoLongerMaster(String source) {
|
||||
logger.debug("failed to preform reroute after cluster settings were updated - current node is no longer a master");
|
||||
listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
|
||||
listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, updater.getTransientUpdates(), updater.getPersistentUpdate()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -181,58 +172,11 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct
|
||||
|
||||
@Override
|
||||
public ClusterState execute(final ClusterState currentState) {
|
||||
Settings.Builder transientSettings = Settings.settingsBuilder();
|
||||
transientSettings.put(currentState.metaData().transientSettings());
|
||||
for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
|
||||
if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
|
||||
String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
|
||||
if (error == null) {
|
||||
transientSettings.put(entry.getKey(), entry.getValue());
|
||||
transientUpdates.put(entry.getKey(), entry.getValue());
|
||||
changed = true;
|
||||
} else {
|
||||
logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
|
||||
}
|
||||
} else {
|
||||
logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
Settings.Builder persistentSettings = Settings.settingsBuilder();
|
||||
persistentSettings.put(currentState.metaData().persistentSettings());
|
||||
for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
|
||||
if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
|
||||
String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
|
||||
if (error == null) {
|
||||
persistentSettings.put(entry.getKey(), entry.getValue());
|
||||
persistentUpdates.put(entry.getKey(), entry.getValue());
|
||||
changed = true;
|
||||
} else {
|
||||
logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
|
||||
}
|
||||
} else {
|
||||
logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
if (!changed) {
|
||||
return currentState;
|
||||
}
|
||||
|
||||
MetaData.Builder metaData = MetaData.builder(currentState.metaData())
|
||||
.persistentSettings(persistentSettings.build())
|
||||
.transientSettings(transientSettings.build());
|
||||
|
||||
ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
|
||||
boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false);
|
||||
if (updatedReadOnly) {
|
||||
blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
|
||||
} else {
|
||||
blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
|
||||
}
|
||||
|
||||
return builder(currentState).metaData(metaData).blocks(blocks).build();
|
||||
ClusterState clusterState = updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings());
|
||||
changed = clusterState != currentState;
|
||||
return clusterState;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.snapshots.delete;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
|
||||
|
@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.admin.cluster.state;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
|
||||
|
@ -19,11 +19,11 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.stats;
|
||||
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.action.admin.indices.stats.ShardStats;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
@ -107,4 +107,4 @@ public class ClusterStatsNodeResponse extends BaseNodeResponse {
|
||||
ss.writeTo(out);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
|
||||
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
|
||||
import org.elasticsearch.plugins.PluginInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
@ -38,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.monitor.fs.FsInfo;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
import org.elasticsearch.plugins.PluginInfo;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetAddress;
|
||||
|
@ -19,9 +19,9 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.stats;
|
||||
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
@ -168,4 +168,4 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
|
||||
return "{ \"error\" : \"" + e.getMessage() + "\"}";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.stats;
|
||||
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
|
||||
import org.elasticsearch.action.admin.indices.stats.CommonStats;
|
||||
@ -30,6 +29,7 @@ import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.TransportNodesAction;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.health.ClusterStateHealth;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
|
@ -42,7 +42,13 @@ import org.elasticsearch.common.io.FastStringReader;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.analysis.*;
|
||||
import org.elasticsearch.index.analysis.AnalysisRegistry;
|
||||
import org.elasticsearch.index.analysis.AnalysisService;
|
||||
import org.elasticsearch.index.analysis.CharFilterFactory;
|
||||
import org.elasticsearch.index.analysis.CustomAnalyzer;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.TokenizerFactory;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
@ -53,7 +59,13 @@ import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
/**
|
||||
* Transport action used to execute analyze requests
|
||||
|
@ -31,31 +31,36 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
/**
|
||||
* Close index action
|
||||
*/
|
||||
public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIndexRequest, CloseIndexResponse> implements NodeSettingsService.Listener {
|
||||
public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIndexRequest, CloseIndexResponse> {
|
||||
|
||||
private final MetaDataIndexStateService indexStateService;
|
||||
private final DestructiveOperations destructiveOperations;
|
||||
private volatile boolean closeIndexEnabled;
|
||||
public static final String SETTING_CLUSTER_INDICES_CLOSE_ENABLE = "cluster.indices.close.enable";
|
||||
public static final Setting<Boolean> CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.CLUSTER);
|
||||
|
||||
@Inject
|
||||
public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, MetaDataIndexStateService indexStateService,
|
||||
NodeSettingsService nodeSettingsService, ActionFilters actionFilters,
|
||||
ClusterSettings clusterSettings, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, DestructiveOperations destructiveOperations) {
|
||||
super(settings, CloseIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, CloseIndexRequest::new);
|
||||
this.indexStateService = indexStateService;
|
||||
this.destructiveOperations = destructiveOperations;
|
||||
this.closeIndexEnabled = settings.getAsBoolean(SETTING_CLUSTER_INDICES_CLOSE_ENABLE, true);
|
||||
nodeSettingsService.addListener(this);
|
||||
this.closeIndexEnabled = CLUSTER_INDICES_CLOSE_ENABLE_SETTING.get(settings);
|
||||
clusterSettings.addSettingsUpdateConsumer(CLUSTER_INDICES_CLOSE_ENABLE_SETTING, this::setCloseIndexEnabled);
|
||||
}
|
||||
|
||||
// Consumer registered with ClusterSettings for CLUSTER_INDICES_CLOSE_ENABLE_SETTING:
// refreshes the cached flag that doExecute checks before allowing an index close.
private void setCloseIndexEnabled(boolean closeIndexEnabled) {
    this.closeIndexEnabled = closeIndexEnabled;
}
|
||||
|
||||
@Override
|
||||
@ -73,7 +78,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
|
||||
protected void doExecute(CloseIndexRequest request, ActionListener<CloseIndexResponse> listener) {
|
||||
destructiveOperations.failDestructive(request.indices());
|
||||
if (closeIndexEnabled == false) {
|
||||
throw new IllegalStateException("closing indices is disabled - set [" + SETTING_CLUSTER_INDICES_CLOSE_ENABLE + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace");
|
||||
throw new IllegalStateException("closing indices is disabled - set [" + CLUSTER_INDICES_CLOSE_ENABLE_SETTING.getKey() + ": true] to enable it. NOTE: closed indices still consume a significant amount of diskspace");
|
||||
}
|
||||
super.doExecute(request, listener);
|
||||
}
|
||||
@ -104,13 +109,4 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRefreshSettings(Settings settings) {
|
||||
final boolean enable = settings.getAsBoolean(SETTING_CLUSTER_INDICES_CLOSE_ENABLE, this.closeIndexEnabled);
|
||||
if (enable != this.closeIndexEnabled) {
|
||||
logger.info("updating [{}] from [{}] to [{}]", SETTING_CLUSTER_INDICES_CLOSE_ENABLE, this.closeIndexEnabled, enable);
|
||||
this.closeIndexEnabled = enable;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.admin.indices.create;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
@ -43,6 +42,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
|
@ -31,7 +31,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
@ -45,8 +44,7 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction<Delete
|
||||
|
||||
@Inject
|
||||
public TransportDeleteIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, MetaDataDeleteIndexService deleteIndexService,
|
||||
NodeSettingsService nodeSettingsService, ActionFilters actionFilters,
|
||||
ThreadPool threadPool, MetaDataDeleteIndexService deleteIndexService, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver, DestructiveOperations destructiveOperations) {
|
||||
super(settings, DeleteIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeleteIndexRequest::new);
|
||||
this.deleteIndexService = deleteIndexService;
|
||||
|
@ -21,10 +21,7 @@ package org.elasticsearch.action.admin.indices.forcemerge;
|
||||
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.admin.indices.get;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetaData;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
|
@ -32,12 +32,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Open index action
|
||||
*/
|
||||
@ -49,7 +46,7 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction<OpenInde
|
||||
@Inject
|
||||
public TransportOpenIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, MetaDataIndexStateService indexStateService,
|
||||
NodeSettingsService nodeSettingsService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
DestructiveOperations destructiveOperations) {
|
||||
super(settings, OpenIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, OpenIndexRequest::new);
|
||||
this.indexStateService = indexStateService;
|
||||
|
@ -35,8 +35,6 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -20,9 +20,9 @@ package org.elasticsearch.action.admin.indices.shards;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -40,7 +40,7 @@ import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse.StoreStatus.*;
|
||||
import static org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse.StoreStatus.readStoreStatus;
|
||||
|
||||
/**
|
||||
* Response for {@link IndicesShardStoresAction}
|
||||
|
@ -21,14 +21,14 @@ package org.elasticsearch.action.admin.indices.shards;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.health.ClusterShardHealth;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.health.ClusterShardHealth;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
|
@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.engine.CommitStats;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.index.engine.CommitStats;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.ShardPath;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.admin.indices.warmer.get;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
|
@ -34,11 +34,9 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.warmer.IndexWarmersMetaData;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.bulk;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
|
@ -32,7 +32,10 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
|
||||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.concurrent.ScheduledThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
/**
|
||||
@ -61,6 +64,9 @@ public class BulkProcessor implements Closeable {
|
||||
|
||||
/**
|
||||
* Callback after a failed execution of bulk request.
|
||||
*
|
||||
* Note that in case an instance of <code>InterruptedException</code> is passed, which means that request processing has been
|
||||
* cancelled externally, the thread's interruption status has been restored prior to calling this method.
|
||||
*/
|
||||
void afterBulk(long executionId, BulkRequest request, Throwable failure);
|
||||
}
|
||||
|
@ -74,11 +74,17 @@ abstract class BulkRequestHandler {
|
||||
.withSyncBackoff(client, bulkRequest);
|
||||
afterCalled = true;
|
||||
listener.afterBulk(executionId, bulkRequest, bulkResponse);
|
||||
} catch (Exception e) {
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
logger.info("Bulk request {} has been cancelled.", e, executionId);
|
||||
if (!afterCalled) {
|
||||
logger.warn("Failed to executed bulk request {}.", e, executionId);
|
||||
listener.afterBulk(executionId, bulkRequest, e);
|
||||
}
|
||||
} catch (Throwable t) {
|
||||
logger.warn("Failed to execute bulk request {}.", t, executionId);
|
||||
if (!afterCalled) {
|
||||
listener.afterBulk(executionId, bulkRequest, t);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -135,11 +141,11 @@ abstract class BulkRequestHandler {
|
||||
});
|
||||
bulkRequestSetupSuccessful = true;
|
||||
} catch (InterruptedException e) {
|
||||
// This is intentionally wrong to avoid changing the behaviour implicitly with this PR. It will be fixed in #14833
|
||||
Thread.interrupted();
|
||||
Thread.currentThread().interrupt();
|
||||
logger.info("Bulk request {} has been cancelled.", e, executionId);
|
||||
listener.afterBulk(executionId, bulkRequest, e);
|
||||
} catch (Throwable t) {
|
||||
logger.warn("Failed to executed bulk request {}.", t, executionId);
|
||||
logger.warn("Failed to execute bulk request {}.", t, executionId);
|
||||
listener.afterBulk(executionId, bulkRequest, t);
|
||||
} finally {
|
||||
if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore
|
||||
|
@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
/**
|
||||
|
@ -239,7 +239,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
|
||||
}
|
||||
} else {
|
||||
concreteIndices.resolveIfAbsent(req);
|
||||
req.routing(clusterState.metaData().resolveIndexRouting(req.routing(), req.index()));
|
||||
req.routing(clusterState.metaData().resolveIndexRouting(req.parent(), req.routing(), req.index()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -50,6 +50,8 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
|
||||
private String id;
|
||||
@Nullable
|
||||
private String routing;
|
||||
@Nullable
|
||||
private String parent;
|
||||
private boolean refresh;
|
||||
private long version = Versions.MATCH_ANY;
|
||||
private VersionType versionType = VersionType.INTERNAL;
|
||||
@ -94,6 +96,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
|
||||
this.type = request.type();
|
||||
this.id = request.id();
|
||||
this.routing = request.routing();
|
||||
this.parent = request.parent();
|
||||
this.refresh = request.refresh();
|
||||
this.version = request.version();
|
||||
this.versionType = request.versionType();
|
||||
@ -155,13 +158,18 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
|
||||
* used for routing with delete requests.
|
||||
* @return The parent for this request.
|
||||
*/
|
||||
@Override
|
||||
public String parent() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parent id of this document.
|
||||
*/
|
||||
public DeleteRequest parent(String parent) {
|
||||
if (routing == null) {
|
||||
routing = parent;
|
||||
}
|
||||
this.parent = parent;
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -230,6 +238,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
|
||||
type = in.readString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
parent = in.readOptionalString();
|
||||
refresh = in.readBoolean();
|
||||
version = in.readLong();
|
||||
versionType = VersionType.fromValue(in.readByte());
|
||||
@ -241,6 +250,7 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
|
||||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing());
|
||||
out.writeOptionalString(parent());
|
||||
out.writeBoolean(refresh);
|
||||
out.writeLong(version);
|
||||
out.writeByte(versionType.getValue());
|
||||
|
@ -95,7 +95,7 @@ public class TransportDeleteAction extends TransportReplicationAction<DeleteRequ
|
||||
|
||||
@Override
|
||||
protected void resolveRequest(final MetaData metaData, String concreteIndex, DeleteRequest request) {
|
||||
request.routing(metaData.resolveIndexRouting(request.routing(), request.index()));
|
||||
request.routing(metaData.resolveIndexRouting(request.parent(), request.routing(), request.index()));
|
||||
if (metaData.hasIndex(concreteIndex)) {
|
||||
// check if routing is required, if so, do a broadcast delete
|
||||
MappingMetaData mappingMd = metaData.index(concreteIndex).mappingOrDefault(request.type());
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.explain;
|
||||
|
||||
import org.apache.lucene.search.Explanation;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -48,7 +48,14 @@ import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
public class TransportFieldStatsTransportAction extends TransportBroadcastAction<FieldStatsRequest, FieldStatsResponse, FieldStatsShardRequest, FieldStatsShardResponse> {
|
||||
|
@ -49,6 +49,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
private String type;
|
||||
private String id;
|
||||
private String routing;
|
||||
private String parent;
|
||||
private String preference;
|
||||
|
||||
private String[] fields;
|
||||
@ -77,6 +78,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
this.type = getRequest.type;
|
||||
this.id = getRequest.id;
|
||||
this.routing = getRequest.routing;
|
||||
this.parent = getRequest.parent;
|
||||
this.preference = getRequest.preference;
|
||||
this.fields = getRequest.fields;
|
||||
this.fetchSourceContext = getRequest.fetchSourceContext;
|
||||
@ -153,13 +155,17 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
|
||||
* used for routing with delete requests.
|
||||
* @return The parent for this request.
|
||||
*/
|
||||
public String parent() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parent id of this document.
|
||||
*/
|
||||
public GetRequest parent(String parent) {
|
||||
if (routing == null) {
|
||||
routing = parent;
|
||||
}
|
||||
this.parent = parent;
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -291,6 +297,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
type = in.readString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
parent = in.readOptionalString();
|
||||
preference = in.readOptionalString();
|
||||
refresh = in.readBoolean();
|
||||
int size = in.readInt();
|
||||
@ -320,6 +327,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalString(parent);
|
||||
out.writeOptionalString(preference);
|
||||
|
||||
out.writeBoolean(refresh);
|
||||
|
@ -20,13 +20,17 @@
|
||||
package org.elasticsearch.action.get;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.*;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.CompositeIndicesRequest;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.RealtimeRequest;
|
||||
import org.elasticsearch.action.ValidateActions;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Iterators;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
@ -37,7 +41,11 @@ import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements Iterable<MultiGetRequest.Item>, CompositeIndicesRequest, RealtimeRequest {
|
||||
|
||||
@ -49,6 +57,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
private String type;
|
||||
private String id;
|
||||
private String routing;
|
||||
private String parent;
|
||||
private String[] fields;
|
||||
private long version = Versions.MATCH_ANY;
|
||||
private VersionType versionType = VersionType.INTERNAL;
|
||||
@ -116,12 +125,17 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
}
|
||||
|
||||
public Item parent(String parent) {
|
||||
if (routing == null) {
|
||||
this.routing = parent;
|
||||
}
|
||||
this.parent = parent;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The parent for this request.
|
||||
*/
|
||||
public String parent() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
public Item fields(String... fields) {
|
||||
this.fields = fields;
|
||||
return this;
|
||||
@ -173,6 +187,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
type = in.readOptionalString();
|
||||
id = in.readString();
|
||||
routing = in.readOptionalString();
|
||||
parent = in.readOptionalString();
|
||||
int size = in.readVInt();
|
||||
if (size > 0) {
|
||||
fields = new String[size];
|
||||
@ -192,6 +207,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
out.writeOptionalString(type);
|
||||
out.writeString(id);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalString(parent);
|
||||
if (fields == null) {
|
||||
out.writeVInt(0);
|
||||
} else {
|
||||
@ -221,6 +237,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
if (!id.equals(item.id)) return false;
|
||||
if (!index.equals(item.index)) return false;
|
||||
if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
|
||||
if (parent != null ? !parent.equals(item.parent) : item.parent != null) return false;
|
||||
if (type != null ? !type.equals(item.type) : item.type != null) return false;
|
||||
if (versionType != item.versionType) return false;
|
||||
|
||||
@ -233,6 +250,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
|
||||
result = 31 * result + (type != null ? type.hashCode() : 0);
|
||||
result = 31 * result + id.hashCode();
|
||||
result = 31 * result + (routing != null ? routing.hashCode() : 0);
|
||||
result = 31 * result + (parent != null ? parent.hashCode() : 0);
|
||||
result = 31 * result + (fields != null ? Arrays.hashCode(fields) : 0);
|
||||
result = 31 * result + Long.hashCode(version);
|
||||
result = 31 * result + versionType.hashCode();
|
||||
|
@ -82,7 +82,7 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
|
||||
request.request().preference(Preference.PRIMARY.type());
|
||||
}
|
||||
// update the routing (request#index here is possibly an alias)
|
||||
request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
|
||||
request.request().routing(state.metaData().resolveIndexRouting(request.request().parent(), request.request().routing(), request.request().index()));
|
||||
// Fail fast on the node that received the request.
|
||||
if (request.request().routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.request().type())) {
|
||||
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.get;
|
||||
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
@ -69,7 +68,7 @@ public class TransportMultiGetAction extends HandledTransportAction<MultiGetRequ
|
||||
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(item.index(), item.type(), item.id(), new IndexNotFoundException(item.index()))));
|
||||
continue;
|
||||
}
|
||||
item.routing(clusterState.metaData().resolveIndexRouting(item.routing(), item.index()));
|
||||
item.routing(clusterState.metaData().resolveIndexRouting(item.parent(), item.routing(), item.index()));
|
||||
String concreteSingleIndex = indexNameExpressionResolver.concreteSingleIndex(clusterState, item);
|
||||
if (item.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, item.type())) {
|
||||
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(concreteSingleIndex, item.type(), item.id(),
|
||||
|
@ -22,7 +22,11 @@ package org.elasticsearch.action.index;
|
||||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.*;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.DocumentRequest;
|
||||
import org.elasticsearch.action.RoutingMissingException;
|
||||
import org.elasticsearch.action.TimestampParsingException;
|
||||
import org.elasticsearch.action.support.replication.ReplicationRequest;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
@ -36,7 +40,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.uid.Versions;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.*;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
@ -304,14 +312,10 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parent id of this document. If routing is not set, automatically set it as the
|
||||
* routing as well.
|
||||
* Sets the parent id of this document.
|
||||
*/
|
||||
public IndexRequest parent(String parent) {
|
||||
this.parent = parent;
|
||||
if (routing == null) {
|
||||
routing = parent;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -593,7 +597,7 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
|
||||
|
||||
public void process(MetaData metaData, @Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) {
|
||||
// resolve the routing if needed
|
||||
routing(metaData.resolveIndexRouting(routing, index));
|
||||
routing(metaData.resolveIndexRouting(parent, routing, index));
|
||||
|
||||
// resolve timestamp if provided externally
|
||||
if (timestamp != null) {
|
||||
|
@ -30,7 +30,6 @@ import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* The response of a get script action.
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.action.indexedscripts.put;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.elasticsearch.ElasticsearchGenerationException;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
@ -40,6 +39,7 @@ import org.elasticsearch.index.VersionType;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
|
@ -35,7 +35,11 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.highlight.HighlightField;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Encapsulates the response of a percolator request.
|
||||
|
@ -27,7 +27,6 @@ import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
|
||||
|
@ -22,7 +22,12 @@ package org.elasticsearch.action.percolate;
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.UnavailableShardsException;
|
||||
import org.elasticsearch.action.get.*;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.get.MultiGetItemResponse;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetResponse;
|
||||
import org.elasticsearch.action.get.TransportMultiGetAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
|
||||
@ -42,7 +47,11 @@ import org.elasticsearch.percolator.PercolatorService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
|
@ -28,7 +28,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -25,15 +25,12 @@ import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.search.SearchException;
|
||||
import org.elasticsearch.search.SearchShardTarget;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.ObjectInputStream;
|
||||
import java.io.ObjectOutputStream;
|
||||
|
||||
import static org.elasticsearch.search.SearchShardTarget.readSearchShardTarget;
|
||||
|
||||
|
@ -20,7 +20,10 @@
|
||||
package org.elasticsearch.action.search;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.type.*;
|
||||
import org.elasticsearch.action.search.type.TransportSearchDfsQueryAndFetchAction;
|
||||
import org.elasticsearch.action.search.type.TransportSearchDfsQueryThenFetchAction;
|
||||
import org.elasticsearch.action.search.type.TransportSearchQueryAndFetchAction;
|
||||
import org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
@ -36,7 +39,8 @@ import org.elasticsearch.transport.TransportService;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.action.search.SearchType.*;
|
||||
import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH;
|
||||
import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -31,7 +31,8 @@ import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import static org.elasticsearch.action.search.type.ParsedScrollId.*;
|
||||
import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_AND_FETCH_TYPE;
|
||||
import static org.elasticsearch.action.search.type.ParsedScrollId.QUERY_THEN_FETCH_TYPE;
|
||||
import static org.elasticsearch.action.search.type.TransportSearchHelper.parseScrollId;
|
||||
|
||||
/**
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.search.type;
|
||||
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.search.type;
|
||||
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.search.type;
|
||||
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.search.ReduceSearchPhaseException;
|
||||
|
@ -20,7 +20,6 @@
|
||||
package org.elasticsearch.action.search.type;
|
||||
|
||||
import com.carrotsearch.hppc.IntArrayList;
|
||||
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
|
@ -21,25 +21,30 @@ package org.elasticsearch.action.support;
|
||||
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
|
||||
/**
|
||||
* Helper for dealing with destructive operations and wildcard usage.
|
||||
*/
|
||||
public final class DestructiveOperations extends AbstractComponent implements NodeSettingsService.Listener {
|
||||
public final class DestructiveOperations extends AbstractComponent {
|
||||
|
||||
/**
|
||||
* Setting which controls whether wildcard usage (*, prefix*, _all) is allowed.
|
||||
*/
|
||||
public static final String REQUIRES_NAME = "action.destructive_requires_name";
|
||||
public static final Setting<Boolean> REQUIRES_NAME_SETTING = Setting.boolSetting("action.destructive_requires_name", false, true, Setting.Scope.CLUSTER);
|
||||
private volatile boolean destructiveRequiresName;
|
||||
|
||||
@Inject
|
||||
public DestructiveOperations(Settings settings, NodeSettingsService nodeSettingsService) {
|
||||
public DestructiveOperations(Settings settings, ClusterSettings clusterSettings) {
|
||||
super(settings);
|
||||
destructiveRequiresName = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, false);
|
||||
nodeSettingsService.addListener(this);
|
||||
destructiveRequiresName = REQUIRES_NAME_SETTING.get(settings);
|
||||
clusterSettings.addSettingsUpdateConsumer(REQUIRES_NAME_SETTING, this::setDestructiveRequiresName);
|
||||
}
|
||||
|
||||
private void setDestructiveRequiresName(boolean destructiveRequiresName) {
|
||||
this.destructiveRequiresName = destructiveRequiresName;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -65,15 +70,6 @@ public final class DestructiveOperations extends AbstractComponent implements No
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRefreshSettings(Settings settings) {
|
||||
boolean newValue = settings.getAsBoolean(DestructiveOperations.REQUIRES_NAME, destructiveRequiresName);
|
||||
if (destructiveRequiresName != newValue) {
|
||||
logger.info("updating [action.operate_all_indices] from [{}] to [{}]", destructiveRequiresName, newValue);
|
||||
this.destructiveRequiresName = newValue;
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean hasWildcardUsage(String aliasOrIndex) {
|
||||
return "_all".equals(aliasOrIndex) || aliasOrIndex.indexOf('*') != -1;
|
||||
}
|
||||
|
@ -19,7 +19,11 @@
|
||||
|
||||
package org.elasticsearch.action.support;
|
||||
|
||||
import org.elasticsearch.action.*;
|
||||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user