Merge remote-tracking branch 'es/7.x' into enrich-7.x

commit 1f3db7eb3e
@@ -8,3 +8,4 @@ ES_BUILD_JAVA=openjdk12
 ES_RUNTIME_JAVA=java8
 GRADLE_TASK=build
@@ -11,6 +11,7 @@ ES_RUNTIME_JAVA:
   - java11
   - java12
   - openjdk12
+  - openjdk13
   - zulu8
   - zulu11
   - zulu12
@@ -27,7 +27,7 @@ archivesBaseName = 'elasticsearch-benchmarks'
 test.enabled = false

 dependencies {
-  compile("org.elasticsearch:elasticsearch:${version}") {
+  compile(project(":server")) {
     // JMH ships with the conflicting version 4.6. This prevents us from using jopt-simple in benchmarks (which should be ok) but allows
     // us to invoke the JMH uberjar as usual.
     exclude group: 'net.sf.jopt-simple', module: 'jopt-simple'
build.gradle (102 changes)
@@ -31,6 +31,7 @@ import org.gradle.plugins.ide.eclipse.model.SourceFolder
 plugins {
   id 'com.gradle.build-scan' version '2.2.1'
   id 'base'
+  id 'elasticsearch.global-build-info'
 }
 if (properties.get("org.elasticsearch.acceptScanTOS", "false") == "true") {
   buildScan {
@@ -208,69 +209,7 @@ allprojects {
     javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
   }

-  /* Sets up the dependencies that we build as part of this project but
-    register as though they were external to resolve internally. We register
-    them as external dependencies so the build plugin that we use can be used
-    to build elasticsearch plugins outside of the elasticsearch source tree. */
-  ext.projectSubstitutions = [
-    "org.elasticsearch.gradle:build-tools:${version}": ':build-tools',
-    "org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
-    "org.elasticsearch:elasticsearch:${version}": ':server',
-    "org.elasticsearch:elasticsearch-cli:${version}": ':libs:elasticsearch-cli',
-    "org.elasticsearch:elasticsearch-core:${version}": ':libs:core',
-    "org.elasticsearch:elasticsearch-nio:${version}": ':libs:nio',
-    "org.elasticsearch:elasticsearch-x-content:${version}": ':libs:x-content',
-    "org.elasticsearch:elasticsearch-geo:${version}": ':libs:elasticsearch-geo',
-    "org.elasticsearch:elasticsearch-secure-sm:${version}": ':libs:secure-sm',
-    "org.elasticsearch:elasticsearch-ssl-config:${version}": ':libs:elasticsearch-ssl-config',
-    "org.elasticsearch.client:elasticsearch-rest-client:${version}": ':client:rest',
-    "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}": ':client:sniffer',
-    "org.elasticsearch.client:elasticsearch-rest-high-level-client:${version}": ':client:rest-high-level',
-    "org.elasticsearch.client:test:${version}": ':client:test',
-    "org.elasticsearch.client:transport:${version}": ':client:transport',
-    "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${version}": ':modules:lang-painless:spi',
-    "org.elasticsearch.test:framework:${version}": ':test:framework',
-    "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage',
-    "org.elasticsearch.xpack.test:feature-aware:${version}": ':x-pack:test:feature-aware',
-    // for transport client
-    "org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4',
-    "org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex',
-    "org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache',
-    "org.elasticsearch.plugin:parent-join-client:${version}": ':modules:parent-join',
-    "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}": ':modules:aggs-matrix-stats',
-    "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator',
-    "org.elasticsearch.plugin:rank-eval-client:${version}": ':modules:rank-eval',
-    // for security example plugins
-    "org.elasticsearch.plugin:x-pack-core:${version}": ':x-pack:plugin:core',
-    "org.elasticsearch.client:x-pack-transport:${version}": ':x-pack:transport-client'
-  ]
-
-  /*
-   * Gradle only resolve project substitutions during dependency resolution but
-   * we sometimes want to do the resolution at other times. This creates a
-   * convenient method we can call to do it.
-   */
-  ext.dependencyToProject = { Dependency dep ->
-    if (dep instanceof ProjectDependency) {
-      return dep.dependencyProject
-    } else {
-      String substitution = projectSubstitutions.get("${dep.group}:${dep.name}:${dep.version}")
-      if (substitution != null) {
-        return findProject(substitution)
-      }
-      return null
-    }
-  }
-
-  project.afterEvaluate {
-    configurations.all {
-      resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
-        projectSubstitutions.each { k,v ->
-          subs.substitute(subs.module(k)).with(subs.project(v))
-        }
-      }
-    }

   // Handle javadoc dependencies across projects. Order matters: the linksOffline for
   // org.elasticsearch:elasticsearch must be the last one or all the links for the
   // other packages (e.g org.elasticsearch.client) will point to server rather than
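Note: for readers unfamiliar with the mechanism being deleted above, Gradle's dependency-substitution API rewrites external module coordinates to local project paths at resolution time. A minimal standalone sketch of the pattern follows; the module coordinates "com.example:my-lib:1.0" and the project path ':my-lib' are illustrative placeholders, not Elasticsearch's:

// build.gradle -- minimal sketch of the dependency-substitution pattern removed above
configurations.all {
    resolutionStrategy.dependencySubstitution { subs ->
        // any resolution of the external module is rewritten to the local project
        subs.substitute(subs.module('com.example:my-lib:1.0')).with(subs.project(':my-lib'))
    }
}

With this in place, a consumer that declares the external coordinates transparently builds against the checked-out project instead, which is exactly what the removed block did for every Elasticsearch artifact.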
@@ -279,10 +218,10 @@ allprojects {
   String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
   Closure sortClosure = { a, b -> b.group <=> a.group }
   Closure depJavadocClosure = { shadowed, dep ->
-    if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) {
+    if ((dep instanceof ProjectDependency) == false) {
       return
     }
-    Project upstreamProject = project.ext.dependencyToProject(dep)
+    Project upstreamProject = dep.dependencyProject
     if (upstreamProject == null) {
       return
     }
@@ -337,9 +276,9 @@ gradle.projectsEvaluated {
   if (tasks.findByPath('test') != null && tasks.findByPath('integTest') != null) {
     integTest.mustRunAfter test
   }
-  configurations.all { Configuration configuration ->
-    dependencies.all { Dependency dep ->
-      Project upstreamProject = dependencyToProject(dep)
+  configurations.matching { it.canBeResolved }.all { Configuration configuration ->
+    dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep ->
+      Project upstreamProject = dep.dependencyProject
       if (upstreamProject != null) {
         if (project.path == upstreamProject.path) {
           // TODO: distribution integ tests depend on themselves (!), fix that
@@ -552,31 +491,6 @@ gradle.projectsEvaluated {
   }
 }

-if (System.properties.get("build.compare") != null) {
-  apply plugin: 'compare-gradle-builds'
-  compareGradleBuilds {
-    ext.referenceProject = System.properties.get("build.compare")
-    doFirst {
-      if (file(referenceProject).exists() == false) {
-        throw new GradleException(
-          "Use git worktree to check out a version to compare against to ../elasticsearch_build_reference"
-        )
-      }
-    }
-    sourceBuild {
-      gradleVersion = gradle.getGradleVersion()
-      projectDir = referenceProject
-      tasks = ["clean", "assemble"]
-      arguments = ["-Dbuild.compare_friendly=true"]
-    }
-    targetBuild {
-      tasks = ["clean", "assemble"]
-      // use -Dorg.gradle.java.home= to alter jdk versions
-      arguments = ["-Dbuild.compare_friendly=true"]
-    }
-  }
-}
-
 allprojects {
   task resolveAllDependencies {
     dependsOn tasks.matching { it.name == "pullFixture"}
@@ -618,7 +532,3 @@ allprojects {
     }
   }
 }
-
-
-
-
@@ -65,39 +65,10 @@ processResources {
 if (JavaVersion.current() < JavaVersion.VERSION_11) {
   throw new GradleException('At least Java 11 is required to build elasticsearch gradle tools')
 }
-// Gradle 4.10 does not support setting this to 11 yet
-targetCompatibility = "10"
-sourceCompatibility = "10"
-
-// We have a few classes that need to be compiled for older java versions because these are used to run checks against
-// those
-sourceSets {
-  minimumRuntime {
-    // We only want Java here, but the Groovy doesn't configure javadoc correctly if we don't define this as groovy
-    groovy {
-      srcDirs = ['src/main/minimumRuntime']
-    }
-  }
-}
-compileMinimumRuntimeGroovy {
-  // We can't use BuildPlugin here, so read from file
-  String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
-  targetCompatibility = minimumRuntimeVersion
-  sourceCompatibility = minimumRuntimeVersion
-}
-dependencies {
-  if (project.ext.has("isEclipse") == false || project.ext.isEclipse == false) {
-    // eclipse is confused if this is set explicitly
-    compile sourceSets.minimumRuntime.output
-  }
-  minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}"
-  minimumRuntimeCompile localGroovy()
-  minimumRuntimeCompile gradleApi()
-}
-jar {
-  from sourceSets.minimumRuntime.output
-}
-
+// Keep compatibility with Java 8 for external users of build-tools that haven't migrated to Java 11
+targetCompatibility = '8'
+sourceCompatibility = '8'

 /*****************************************************************************
  *             Dependencies used by the entire build                         *
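The minimumRuntime source set removed above is an instance of a general Gradle pattern: a dedicated source set whose compile task is pinned to an older language level than the rest of the project. A minimal sketch of that pattern under illustrative names (not the build-tools code) follows:

// in a project with the 'java' plugin applied; 'legacyRuntime' is a made-up source set name
sourceSets {
    legacyRuntime {
        java {
            srcDirs = ['src/main/legacyRuntime']
        }
    }
}

// each source set gets its own compile task; pin just that task to Java 8
compileLegacyRuntimeJava {
    sourceCompatibility = '8'
    targetCompatibility = '8'
}

dependencies {
    // make the older-bytecode classes available to the main code
    compile sourceSets.legacyRuntime.output
}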
@@ -110,6 +81,8 @@ repositories {
 dependencies {
   compile localGroovy()

+  compile 'commons-codec:commons-codec:1.12'
+
   compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
   compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
   compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
@@ -117,7 +90,7 @@ dependencies {
   compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
   compile 'org.apache.rat:apache-rat:0.11'
   compile "org.elasticsearch:jna:4.5.1"
-  compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
+  compile 'com.github.jengelman.gradle.plugins:shadow:4.0.3'
   compile 'de.thetaphi:forbiddenapis:2.6'
   compile 'com.avast.gradle:gradle-docker-compose-plugin:0.8.12'
   testCompile "junit:junit:${props.getProperty('junit')}"
@@ -162,7 +135,6 @@ if (project != rootProject) {
   dependenciesInfo.enabled = false
   forbiddenApisMain.enabled = false
   forbiddenApisTest.enabled = false
-  forbiddenApisMinimumRuntime.enabled = false
   jarHell.enabled = false
   thirdPartyAudit.enabled = false
@@ -179,21 +151,17 @@ if (project != rootProject) {
     distribution project(':distribution:archives:oss-linux-tar')
   }

+  // for external projects we want to remove the marker file indicating we are running the Elasticsearch project
+  processResources {
+    exclude 'buildSrc.marker'
+  }
+
-  String localDownloads = "${rootProject.buildDir}/local-downloads"
-  task setupLocalDownloads(type:Copy) {
-    from configurations.distribution
-    into localDownloads
-  }
-
   test {
     // The test task is configured to runtimeJava version, but build-tools doesn't support all of them, so test
     // with compiler instead on the ones that are too old.
     if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_10) {
       executable = "${project.compilerJavaHome}/bin/java"
     }
   }

   // This can't be an RandomizedTestingTask because we can't yet reference it
   task integTest(type: Test) {
     // integration test requires the local testing repo for example plugin builds
     dependsOn project.rootProject.allprojects.collect {
@@ -217,6 +185,8 @@ if (project != rootProject) {
     systemProperty 'test.lucene-snapshot-revision', isLuceneSnapshot[0][1]
   }
+  maxParallelForks System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel.toString()) as Integer
   // These tests run Gradle which doesn't have FIPS support
   onlyIf { project.inFipsJvm == false }
 }
 check.dependsOn(integTest)
(File diff suppressed because it is too large.)
@@ -1,49 +0,0 @@ (file deleted)
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.gradle
-
-import org.gradle.api.DefaultTask
-import org.gradle.api.tasks.Input
-import org.gradle.api.tasks.TaskAction
-import org.gradle.internal.nativeintegration.filesystem.Chmod
-
-import javax.inject.Inject
-
-/**
- * Creates an empty directory.
- */
-class EmptyDirTask extends DefaultTask {
-    @Input
-    Object dir
-
-    @Input
-    int dirMode = 0755
-
-    @TaskAction
-    void create() {
-        dir = dir as File
-        dir.mkdirs()
-        getChmod().chmod(dir, dirMode)
-    }
-
-    @Inject
-    Chmod getChmod() {
-        throw new UnsupportedOperationException()
-    }
-}
@@ -43,7 +43,7 @@ public class SnippetsTask extends DefaultTask {
     private static final String SKIP = /skip:([^\]]+)/
     private static final String SETUP = /setup:([^ \]]+)/
     private static final String WARNING = /warning:(.+)/
-    private static final String CAT = /(_cat)/
+    private static final String NON_JSON = /(non_json)/
     private static final String TEST_SYNTAX =
        /(?:$CATCH|$SUBSTITUTION|$SKIP|(continued)|$SETUP|$WARNING|(skip_shard_failures)) ?/

@@ -255,12 +255,12 @@ public class SnippetsTask extends DefaultTask {
                     substitutions = []
                 }
                 String loc = "$file:$lineNumber"
-                parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$CAT|$SKIP) ?/) {
+                parse(loc, matcher.group(2), /(?:$SUBSTITUTION|$NON_JSON|$SKIP) ?/) {
                     if (it.group(1) != null) {
                         // TESTRESPONSE[s/adsf/jkl/]
                         substitutions.add([it.group(1), it.group(2)])
                     } else if (it.group(3) != null) {
-                        // TESTRESPONSE[_cat]
+                        // TESTRESPONSE[non_json]
                         substitutions.add(['^', '/'])
                         substitutions.add(['\n$', '\\\\s*/'])
                         substitutions.add(['( +)', '$1\\\\s+'])
@@ -27,7 +27,9 @@ import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.test.RestIntegTestTask
 import org.elasticsearch.gradle.test.RunTask
 import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.tool.ClasspathUtils
 import org.gradle.api.InvalidUserDataException
+import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.publish.maven.MavenPublication
@@ -43,13 +45,13 @@ import java.util.regex.Pattern
 /**
  * Encapsulates build configuration for an Elasticsearch plugin.
  */
-class PluginBuildPlugin extends BuildPlugin {
+class PluginBuildPlugin implements Plugin<Project> {

     public static final String PLUGIN_EXTENSION_NAME = 'esplugin'

     @Override
     void apply(Project project) {
-        super.apply(project)
+        project.pluginManager.apply(BuildPlugin)

         PluginPropertiesExtension extension = project.extensions.create(PLUGIN_EXTENSION_NAME, PluginPropertiesExtension, project)
         configureDependencies(project)
@@ -153,8 +155,13 @@ class PluginBuildPlugin extends BuildPlugin {

     private static void configureDependencies(Project project) {
         project.dependencies {
-            compileOnly "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
-            testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
+            if (ClasspathUtils.isElasticsearchProject()) {
+                compileOnly project.project(':server')
+                testCompile project.project(':test:framework')
+            } else {
+                compileOnly "org.elasticsearch:elasticsearch:${project.versions.elasticsearch}"
+                testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
+            }
             // we "upgrade" these optional deps to provided for plugins, since they will run
             // with a full elasticsearch server that includes optional deps
             compileOnly "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}"
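ClasspathUtils.isElasticsearchProject() itself is not shown in this diff. Given the buildSrc change above that excludes buildSrc.marker from processResources for external builds, a marker-resource check along these lines is a plausible sketch; this is an assumption, not the actual source:

// Hypothetical sketch of the marker check implied by the 'buildSrc.marker'
// handling above; the real ClasspathUtils implementation is not in this diff.
class ClasspathUtils {
    static boolean isElasticsearchProject() {
        // the marker resource is only packaged when building inside the
        // Elasticsearch repository; external builds exclude it (see buildSrc/build.gradle)
        return ClasspathUtils.classLoader.getResource('buildSrc.marker') != null
    }
}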
@@ -1,94 +0,0 @@ (file deleted)
-/* (Apache License 2.0 header, identical to the one above) */
-package org.elasticsearch.gradle.plugin
-
-import org.gradle.api.Project
-import org.gradle.api.tasks.Input
-import org.gradle.api.tasks.InputFile
-
-/**
- * A container for plugin properties that will be written to the plugin descriptor, for easy
- * manipulation in the gradle DSL.
- */
-class PluginPropertiesExtension {
-
-    @Input
-    String name
-
-    @Input
-    String version
-
-    @Input
-    String description
-
-    @Input
-    String classname
-
-    /** Other plugins this plugin extends through SPI */
-    @Input
-    List<String> extendedPlugins = []
-
-    @Input
-    boolean hasNativeController = false
-
-    /** Indicates whether the plugin jar should be made available for the transport client. */
-    @Input
-    boolean hasClientJar = false
-
-    /** True if the plugin requires the elasticsearch keystore to exist, false otherwise. */
-    @Input
-    boolean requiresKeystore = false
-
-    /** A license file that should be included in the built plugin zip. */
-    private File licenseFile = null
-
-    /**
-     * A notice file that should be included in the built plugin zip. This will be
-     * extended with notices from the {@code licenses/} directory.
-     */
-    private File noticeFile = null
-
-    Project project = null
-
-    PluginPropertiesExtension(Project project) {
-        name = project.name
-        version = project.version
-        this.project = project
-    }
-
-    @InputFile
-    File getLicenseFile() {
-        return licenseFile
-    }
-
-    void setLicenseFile(File licenseFile) {
-        project.ext.licenseFile = licenseFile
-        this.licenseFile = licenseFile
-    }
-
-    @InputFile
-    File getNoticeFile() {
-        return noticeFile
-    }
-
-    void setNoticeFile(File noticeFile) {
-        project.ext.noticeFile = noticeFile
-        this.noticeFile = noticeFile
-    }
-}
@@ -1,268 +0,0 @@ (file deleted)
-/* (Apache License 2.0 header, identical to the one above) */
-package org.elasticsearch.gradle.precommit
-
-import org.gradle.api.DefaultTask
-import org.gradle.api.GradleException
-import org.gradle.api.InvalidUserDataException
-import org.gradle.api.file.FileCollection
-import org.gradle.api.tasks.Input
-import org.gradle.api.tasks.InputDirectory
-import org.gradle.api.tasks.InputFiles
-import org.gradle.api.tasks.TaskAction
-
-import java.nio.file.Files
-import java.security.MessageDigest
-import java.util.regex.Matcher
-import java.util.regex.Pattern
-
-/**
- * A task to check licenses for dependencies.
- *
- * There are two parts to the check:
- * <ul>
- *   <li>LICENSE and NOTICE files</li>
- *   <li>SHA checksums for each dependency jar</li>
- * </ul>
- *
- * The directory to find the license and sha files in defaults to the dir @{code licenses}
- * in the project directory for this task. You can override this directory:
- * <pre>
- *   dependencyLicenses {
- *     licensesDir = project.file('mybetterlicensedir')
- *   }
- * </pre>
- *
- * The jar files to check default to the dependencies from the default configuration. You
- * can override this, for example, to only check compile dependencies:
- * <pre>
- *   dependencyLicenses {
- *     dependencies = project.configurations.compile
- *   }
- * </pre>
- *
- * Every jar must have a {@code .sha1} file in the licenses dir. These can be managed
- * automatically using the {@code updateShas} helper task that is created along
- * with this task. It will add {@code .sha1} files for new jars that are in dependencies
- * and remove old {@code .sha1} files that are no longer needed.
- *
- * Every jar must also have a LICENSE and NOTICE file. However, multiple jars can share
- * LICENSE and NOTICE files by mapping a pattern to the same name.
- * <pre>
- *   dependencyLicenses {
- *     mapping from: /lucene-.*/, to: 'lucene'
- *   }
- * </pre>
- */
-public class DependencyLicensesTask extends DefaultTask {
-    static final String SHA_EXTENSION = '.sha1'
-
-    // TODO: we should be able to default this to eg compile deps, but we need to move the licenses
-    // check from distribution to core (ie this should only be run on java projects)
-    /** A collection of jar files that should be checked. */
-    @InputFiles
-    public FileCollection dependencies
-
-    /** The directory to find the license and sha files in. */
-    @InputDirectory
-    public File licensesDir = new File(project.projectDir, 'licenses')
-
-    /** A map of patterns to prefix, used to find the LICENSE and NOTICE file. */
-    private LinkedHashMap<String, String> mappings = new LinkedHashMap<>()
-
-    /** Names of dependencies whose shas should not exist. */
-    private Set<String> ignoreShas = new HashSet<>()
-
-    /**
-     * Add a mapping from a regex pattern for the jar name, to a prefix to find
-     * the LICENSE and NOTICE file for that jar.
-     */
-    @Input
-    public void mapping(Map<String, String> props) {
-        String from = props.remove('from')
-        if (from == null) {
-            throw new InvalidUserDataException('Missing "from" setting for license name mapping')
-        }
-        String to = props.remove('to')
-        if (to == null) {
-            throw new InvalidUserDataException('Missing "to" setting for license name mapping')
-        }
-        if (props.isEmpty() == false) {
-            throw new InvalidUserDataException("Unknown properties for mapping on dependencyLicenses: ${props.keySet()}")
-        }
-        mappings.put(from, to)
-    }
-
-    public LinkedHashMap<String, String> getMappings() {
-        return new LinkedHashMap<>(mappings)
-    }
-
-    /**
-     * Add a rule which will skip SHA checking for the given dependency name. This should be used for
-     * locally build dependencies, which cause the sha to change constantly.
-     */
-    @Input
-    public void ignoreSha(String dep) {
-        ignoreShas.add(dep)
-    }
-
-    @TaskAction
-    public void checkDependencies() {
-        if (dependencies.isEmpty()) {
-            if (licensesDir.exists()) {
-                throw new GradleException("Licenses dir ${licensesDir} exists, but there are no dependencies")
-            }
-            return // no dependencies to check
-        } else if (licensesDir.exists() == false) {
-            throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies")
-        }
-
-        Map<String, Integer> licenses = new HashMap<>()
-        Map<String, Integer> notices = new HashMap<>()
-        Set<File> shaFiles = new HashSet<File>()
-
-        licensesDir.eachFile {
-            String name = it.getName()
-            if (name.endsWith(SHA_EXTENSION)) {
-                shaFiles.add(it)
-            } else if (name.endsWith('-LICENSE') || name.endsWith('-LICENSE.txt')) {
-                // TODO: why do we support suffix of LICENSE *and* LICENSE.txt??
-                licenses.put(name, 0)
-            } else if (name.contains('-NOTICE') || name.contains('-NOTICE.txt')) {
-                notices.put(name, 0)
-            }
-        }
-
-        for (File dependency : dependencies) {
-            String jarName = dependency.getName()
-            String depName = jarName - ~/\-v?\d+.*/
-            if (ignoreShas.contains(depName)) {
-                // local deps should not have sha files!
-                if (getShaFile(jarName).exists()) {
-                    throw new GradleException("SHA file ${getShaFile(jarName)} exists for ignored dependency ${depName}")
-                }
-            } else {
-                logger.info("Checking sha for " + jarName)
-                checkSha(dependency, jarName, shaFiles)
-            }
-
-            final String dependencyName = getDependencyName(mappings, depName)
-            logger.info("mapped dependency name ${depName} to ${dependencyName} for license/notice check")
-            checkFile(dependencyName, jarName, licenses, 'LICENSE')
-            checkFile(dependencyName, jarName, notices, 'NOTICE')
-        }
-
-        licenses.each { license, count ->
-            if (count == 0) {
-                throw new GradleException("Unused license ${license}")
-            }
-        }
-        notices.each { notice, count ->
-            if (count == 0) {
-                throw new GradleException("Unused notice ${notice}")
-            }
-        }
-        if (shaFiles.isEmpty() == false) {
-            throw new GradleException("Unused sha files found: \n${shaFiles.join('\n')}")
-        }
-    }
-
-    public static String getDependencyName(final LinkedHashMap<String, String> mappings, final String dependencyName) {
-        // order is the same for keys and values iteration since we use a linked hashmap
-        List<String> mapped = new ArrayList<>(mappings.values())
-        Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
-        Matcher match = mappingsPattern.matcher(dependencyName)
-        if (match.matches()) {
-            int i = 0
-            while (i < match.groupCount() && match.group(i + 1) == null) ++i;
-            return mapped.get(i)
-        }
-        return dependencyName
-    }
-
-    private File getShaFile(String jarName) {
-        return new File(licensesDir, jarName + SHA_EXTENSION)
-    }
-
-    private void checkSha(File jar, String jarName, Set<File> shaFiles) {
-        File shaFile = getShaFile(jarName)
-        if (shaFile.exists() == false) {
-            throw new GradleException("Missing SHA for ${jarName}. Run 'gradle updateSHAs' to create")
-        }
-        // TODO: shouldn't have to trim, sha files should not have trailing newline
-        String expectedSha = shaFile.getText('UTF-8').trim()
-        String sha = MessageDigest.getInstance("SHA-1").digest(jar.getBytes()).encodeHex().toString()
-        if (expectedSha.equals(sha) == false) {
-            throw new GradleException("SHA has changed! Expected ${expectedSha} for ${jarName} but got ${sha}. " +
-                "\nThis usually indicates a corrupt dependency cache or artifacts changed upstream." +
-                "\nEither wipe your cache, fix the upstream artifact, or delete ${shaFile} and run updateShas")
-        }
-        shaFiles.remove(shaFile)
-    }
-
-    private void checkFile(String name, String jarName, Map<String, Integer> counters, String type) {
-        String fileName = "${name}-${type}"
-        Integer count = counters.get(fileName)
-        if (count == null) {
-            // try the other suffix...TODO: get rid of this, just support ending in .txt
-            fileName = "${fileName}.txt"
-            counters.get(fileName)
-        }
-        count = counters.get(fileName)
-        if (count == null) {
-            throw new GradleException("Missing ${type} for ${jarName}, expected in ${fileName}")
-        }
-        counters.put(fileName, count + 1)
-    }
-
-    /** A helper task to update the sha files in the license dir. */
-    public static class UpdateShasTask extends DefaultTask {
-        private DependencyLicensesTask parentTask
-
-        @TaskAction
-        public void updateShas() {
-            Set<File> shaFiles = new HashSet<File>()
-            parentTask.licensesDir.eachFile {
-                String name = it.getName()
-                if (name.endsWith(SHA_EXTENSION)) {
-                    shaFiles.add(it)
-                }
-            }
-            for (File dependency : parentTask.dependencies) {
-                String jarName = dependency.getName()
-                String depName = jarName - ~/\-\d+.*/
-                if (parentTask.ignoreShas.contains(depName)) {
-                    continue
-                }
-                File shaFile = new File(parentTask.licensesDir, jarName + SHA_EXTENSION)
-                if (shaFile.exists() == false) {
-                    logger.lifecycle("Adding sha for ${jarName}")
-                    String sha = MessageDigest.getInstance("SHA-1").digest(dependency.getBytes()).encodeHex().toString()
-                    shaFile.setText(sha, 'UTF-8')
-                } else {
-                    shaFiles.remove(shaFile)
-                }
-            }
-            shaFiles.each { shaFile ->
-                logger.lifecycle("Removing unused sha ${shaFile.getName()}")
-                Files.delete(shaFile.toPath())
-            }
-        }
-    }
-}
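For reference, the javadoc of the deleted task shows how builds configured it; putting those fragments together, a typical configuration looks like the following (the ignored dependency name is illustrative):

// build.gradle -- usage sketch assembled from the javadoc above
dependencyLicenses {
    // only check compile dependencies instead of the default configuration
    dependencies = project.configurations.compile
    // many lucene-* jars share one LICENSE/NOTICE pair named 'lucene'
    mapping from: /lucene-.*/, to: 'lucene'
    // locally built jars change sha constantly, so skip their sha check
    ignoreSha 'some-local-dependency'
}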
@@ -23,11 +23,13 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
 import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
 import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
 import org.elasticsearch.gradle.VersionProperties
+import org.elasticsearch.gradle.tool.ClasspathUtils
 import org.gradle.api.JavaVersion
 import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.plugins.quality.Checkstyle

 /**
  * Validation tasks which should be run before committing. These run before tests.
  */

@@ -40,18 +42,18 @@ class PrecommitTasks {
     public static Task create(Project project, boolean includeDependencyLicenses) {
         project.configurations.create("forbiddenApisCliJar")
         project.dependencies {
-            forbiddenApisCliJar ('de.thetaphi:forbiddenapis:2.6')
+            forbiddenApisCliJar('de.thetaphi:forbiddenapis:2.6')
         }

         List<Task> precommitTasks = [
-                configureCheckstyle(project),
-                configureForbiddenApisCli(project),
-                project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
-                project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
-                project.tasks.create('filepermissions', FilePermissionsTask.class),
-                configureJarHell(project),
-                configureThirdPartyAudit(project),
-                configureTestingConventions(project)
+            configureCheckstyle(project),
+            configureForbiddenApisCli(project),
+            project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
+            project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
+            project.tasks.create('filepermissions', FilePermissionsTask.class),
+            configureJarHell(project),
+            configureThirdPartyAudit(project),
+            configureTestingConventions(project)
         ]

         // tasks with just tests don't need dependency licenses, so this flag makes adding
@@ -85,10 +87,10 @@ class PrecommitTasks {
         }

         return project.tasks.create([
-                name: 'precommit',
-                group: JavaBasePlugin.VERIFICATION_GROUP,
-                description: 'Runs all non-test checks.',
-                dependsOn: precommitTasks
+            name       : 'precommit',
+            group      : JavaBasePlugin.VERIFICATION_GROUP,
+            description: 'Runs all non-test checks.',
+            dependsOn  : precommitTasks
         ])
     }
@@ -107,26 +109,22 @@ class PrecommitTasks {
     }

     private static Task configureJarHell(Project project) {
-        Task task = project.tasks.create('jarHell', JarHellTask.class)
-        task.classpath = project.sourceSets.test.runtimeClasspath
-        if (project.plugins.hasPlugin(ShadowPlugin)) {
-            task.classpath += project.configurations.bundle
-        }
+        return project.tasks.create('jarHell', JarHellTask) { task ->
+            task.classpath = project.sourceSets.test.runtimeClasspath
+            if (project.plugins.hasPlugin(ShadowPlugin)) {
+                task.classpath += project.configurations.bundle
+            }
+            task.dependsOn(project.sourceSets.test.classesTaskName)
+            task.javaHome = project.runtimeJavaHome
+        }
-        task.dependsOn(project.sourceSets.test.classesTaskName)
-        task.javaHome = project.runtimeJavaHome
-        return task
     }

     private static Task configureThirdPartyAudit(Project project) {
-        ThirdPartyAuditTask thirdPartyAuditTask = project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class)
         ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
-        thirdPartyAuditTask.configure {
-            dependsOn(buildResources)
-            signatureFile = buildResources.copy("forbidden/third-party-audit.txt")
-            javaHome = project.runtimeJavaHome
-            targetCompatibility = project.runtimeJavaVersion
+        return project.tasks.create('thirdPartyAudit', ThirdPartyAuditTask.class) { task ->
+            task.dependsOn(buildResources)
+            task.signatureFile = buildResources.copy("forbidden/third-party-audit.txt")
+            task.javaHome = project.runtimeJavaHome
+            task.targetCompatibility.set(project.provider({ project.runtimeJavaVersion }))
         }
-        return thirdPartyAuditTask
     }

     private static Task configureForbiddenApisCli(Project project) {
@@ -134,16 +132,16 @@ class PrecommitTasks {
         ExportElasticsearchBuildResourcesTask buildResources = project.tasks.getByName('buildResources')
         project.tasks.withType(CheckForbiddenApis) {
             dependsOn(buildResources)
-            targetCompatibility = project.runtimeJavaVersion >= JavaVersion.VERSION_1_9 ?
-                    project.runtimeJavaVersion.getMajorVersion() : project.runtimeJavaVersion
-            if (project.runtimeJavaVersion > JavaVersion.VERSION_11) {
-                doLast {
-                    project.logger.info(
-                        "Forbidden APIs does not support java version past 11. Will use the signatures from 11 for ",
-                        project.runtimeJavaVersion
-                    )
-                }
-                targetCompatibility = JavaVersion.VERSION_11.getMajorVersion()
-            }
+            doFirst {
+                // we need to defer this configuration since we don't know the runtime java version until execution time
+                targetCompatibility = project.runtimeJavaVersion.getMajorVersion()
+                if (project.runtimeJavaVersion > JavaVersion.VERSION_11) {
+                    project.logger.info(
+                        "Forbidden APIs does not support java version past 11. Will use the signatures from 11 for ",
+                        project.runtimeJavaVersion
+                    )
+                    targetCompatibility = JavaVersion.VERSION_11.getMajorVersion()
+                }
+            }
             bundledSignatures = [
                 "jdk-unsafe", "jdk-deprecated", "jdk-non-portable", "jdk-system-out"
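The doFirst change above defers configuration that depends on the runtime Java version to execution time. The same defer-to-execution pattern in isolation, with illustrative task and property names:

// Illustrative task: a value only known at execution time is captured in doFirst,
// which runs immediately before the task's main actions.
task printRuntimeJava {
    doFirst {
        // resolved when the task actually executes, not when the build script is evaluated
        ext.resolvedJavaHome = System.getProperty('java.home')
    }
    doLast {
        println "running on ${resolvedJavaHome}"
    }
}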
@@ -172,7 +170,7 @@ class PrecommitTasks {
                 )
             }
         }
-            Task forbiddenApis = project.tasks.getByName("forbiddenApis")
+        Task forbiddenApis = project.tasks.getByName("forbiddenApis")
         forbiddenApis.group = ""
         return forbiddenApis
     }
@@ -215,7 +213,7 @@ class PrecommitTasks {
         project.checkstyle {
             config = project.resources.text.fromFile(checkstyleConf, 'UTF-8')
             configProperties = [
-                    suppressions: checkstyleSuppressions
+                suppressions: checkstyleSuppressions
             ]
             toolVersion = CHECKSTYLE_VERSION
         }
@@ -233,9 +231,11 @@ class PrecommitTasks {
     }

     private static Task configureLoggerUsage(Project project) {
+        Object dependency = ClasspathUtils.isElasticsearchProject() ? project.project(':test:logger-usage') :
+            "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}"
+
         project.configurations.create('loggerUsagePlugin')
-        project.dependencies.add('loggerUsagePlugin',
-                "org.elasticsearch.test:logger-usage:${VersionProperties.elasticsearch}")
+        project.dependencies.add('loggerUsagePlugin', dependency)
         return project.tasks.create('loggerUsageCheck', LoggerUsageTask.class) {
             classpath = project.configurations.loggerUsagePlugin
         }
@@ -1,66 +0,0 @@ (file deleted)
-/* (Apache License 2.0 header, identical to the one above) */
-
-package org.elasticsearch.gradle.precommit
-
-import org.gradle.api.DefaultTask
-import org.gradle.api.tasks.TaskAction
-
-import java.nio.file.Files
-import java.security.MessageDigest
-
-/**
- * A task to update shas used by {@code DependencyLicensesCheck}
- */
-public class UpdateShasTask extends DefaultTask {
-
-    /** The parent dependency licenses task to use configuration from */
-    public DependencyLicensesTask parentTask
-
-    public UpdateShasTask() {
-        description = 'Updates the sha files for the dependencyLicenses check'
-        onlyIf { parentTask.licensesDir.exists() }
-    }
-
-    @TaskAction
-    public void updateShas() {
-        Set<File> shaFiles = new HashSet<File>()
-        parentTask.licensesDir.eachFile {
-            String name = it.getName()
-            if (name.endsWith(DependencyLicensesTask.SHA_EXTENSION)) {
-                shaFiles.add(it)
-            }
-        }
-        for (File dependency : parentTask.dependencies) {
-            String jarName = dependency.getName()
-            File shaFile = new File(parentTask.licensesDir, jarName + DependencyLicensesTask.SHA_EXTENSION)
-            if (shaFile.exists() == false) {
-                logger.lifecycle("Adding sha for ${jarName}")
-                String sha = MessageDigest.getInstance("SHA-1").digest(dependency.getBytes()).encodeHex().toString()
-                shaFile.setText(sha, 'UTF-8')
-            } else {
-                shaFiles.remove(shaFile)
-            }
-        }
-        shaFiles.each { shaFile ->
-            logger.lifecycle("Removing unused sha ${shaFile.getName()}")
-            Files.delete(shaFile.toPath())
-        }
-    }
-}
@@ -317,12 +317,6 @@ class ClusterFormationTasks {
         // its run after plugins have been installed, as the extra config files may belong to plugins
         setup = configureExtraConfigFilesTask(taskName(prefix, node, 'extraConfig'), project, setup, node)

-        // If the node runs in a FIPS 140-2 JVM, the BCFKS default keystore will be password protected
-        if (project.inFipsJvm){
-            node.config.systemProperties.put('javax.net.ssl.trustStorePassword', 'password')
-            node.config.systemProperties.put('javax.net.ssl.keyStorePassword', 'password')
-        }
-
         // extra setup commands
         for (Map.Entry<String, Object[]> command : node.config.setupCommands.entrySet()) {
             // the first argument is the actual script name, relative to home
@@ -430,16 +424,17 @@ class ClusterFormationTasks {
         if (node.nodeVersion.major >= 7) {
             esConfig['indices.breaker.total.use_real_memory'] = false
         }
-        for (Map.Entry<String, Object> setting : node.config.settings) {
-            if (setting.value == null) {
-                esConfig.remove(setting.key)
-            } else {
-                esConfig.put(setting.key, setting.value)
-            }
-        }

         Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
         writeConfig.doFirst {
+            for (Map.Entry<String, Object> setting : node.config.settings) {
+                if (setting.value == null) {
+                    esConfig.remove(setting.key)
+                } else {
+                    esConfig.put(setting.key, setting.value)
+                }
+            }
+
             esConfig = configFilter.call(esConfig)
             File configFile = new File(node.pathConf, 'elasticsearch.yml')
             logger.info("Configuring ${configFile}")
@@ -690,8 +685,9 @@ class ClusterFormationTasks {
     static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) {
         return project.tasks.create(name: name, type: LoggedExec, dependsOn: setup) { Exec exec ->
             exec.workingDir node.cwd
-            if (project.isRuntimeJavaHomeSet || node.nodeVersion.before(Version.fromString("7.0.0")) ||
-                    node.config.distribution == 'integ-test-zip') {
+            if ((project.isRuntimeJavaHomeSet && node.isBwcNode == false) // runtime Java might not be compatible with old nodes
+                    || node.nodeVersion.before(Version.fromString("7.0.0"))
+                    || node.config.distribution == 'integ-test-zip') {
                 exec.environment.put('JAVA_HOME', project.runtimeJavaHome)
             } else {
                 // force JAVA_HOME to *not* be set
@@ -716,8 +712,9 @@ class ClusterFormationTasks {
         ant.exec(executable: node.executable, spawn: node.config.daemonize, newenvironment: true,
                  dir: node.cwd, taskname: 'elasticsearch') {
             node.env.each { key, value -> env(key: key, value: value) }
-            if (project.isRuntimeJavaHomeSet || node.nodeVersion.before(Version.fromString("7.0.0")) ||
-                    node.config.distribution == 'integ-test-zip') {
+            if ((project.isRuntimeJavaHomeSet && node.isBwcNode == false) // runtime Java might not be compatible with old nodes
+                    || node.nodeVersion.before(Version.fromString("7.0.0"))
+                    || node.config.distribution == 'integ-test-zip') {
                 env(key: 'JAVA_HOME', value: project.runtimeJavaHome)
             }
             node.args.each { arg(value: it) }
@@ -760,6 +757,12 @@ class ClusterFormationTasks {
         }
         start.doLast(elasticsearchRunner)
         start.doFirst {
+            // If the node runs in a FIPS 140-2 JVM, the BCFKS default keystore will be password protected
+            if (project.inFipsJvm){
+                node.config.systemProperties.put('javax.net.ssl.trustStorePassword', 'password')
+                node.config.systemProperties.put('javax.net.ssl.keyStorePassword', 'password')
+            }
+
             // Configure ES JAVA OPTS - adds system properties, assertion flags, remote debug etc
             List<String> esJavaOpts = [node.env.get('ES_JAVA_OPTS', '')]
             String collectedSystemProperties = node.config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
@@ -1,63 +0,0 @@ (file deleted)
-/* (Apache License 2.0 header, identical to the one above) */
-
-package org.elasticsearch.gradle.test
-
-import org.elasticsearch.gradle.plugin.PluginBuildPlugin
-import org.gradle.api.Project
-import org.gradle.api.artifacts.Dependency
-import org.gradle.api.artifacts.ProjectDependency
-import org.gradle.api.tasks.Copy
-
-/**
- * A plugin to run messy tests, which are generally tests that depend on plugins.
- *
- * This plugin will add the same test configuration as standalone tests, except
- * also add the plugin-metadata and properties files for each plugin project
- * dependency.
- */
-class MessyTestPlugin extends StandaloneTestPlugin {
-    @Override
-    public void apply(Project project) {
-        super.apply(project)
-
-        project.configurations.testCompile.dependencies.all { Dependency dep ->
-            // this closure is run every time a compile dependency is added
-            if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) {
-                project.gradle.projectsEvaluated {
-                    addPluginResources(project, dep.dependencyProject)
-                }
-            }
-        }
-    }
-
-    private static addPluginResources(Project project, Project pluginProject) {
-        String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
-        String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
-        Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
-        copyPluginMetadata.into(outputDir)
-        copyPluginMetadata.from(pluginProject.tasks.pluginProperties)
-        copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata'))
-        project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
-
-        // add each generated dir to the test classpath in IDEs
-        project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]]
-        // Eclipse doesn't need this because it gets the entire module as a dependency
-    }
-}
@@ -23,6 +23,7 @@ import com.sun.jna.Native
 import com.sun.jna.WString
 import org.apache.tools.ant.taskdefs.condition.Os
 import org.elasticsearch.gradle.Version
+import org.elasticsearch.gradle.VersionProperties
 import org.gradle.api.Project

 import java.nio.file.Files
@@ -107,6 +108,9 @@ class NodeInfo {
     /** the version of elasticsearch that this node runs */
     Version nodeVersion

+    /** true if the node is not the current version */
+    boolean isBwcNode
+
     /** Holds node configuration for part of a test cluster. */
     NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
         this.config = config
@@ -121,6 +125,7 @@ class NodeInfo {
         baseDir = new File(project.buildDir, "cluster/${prefix} node${nodeNum}")
         pidFile = new File(baseDir, 'es.pid')
         this.nodeVersion = Version.fromString(nodeVersion)
+        this.isBwcNode = this.nodeVersion.before(VersionProperties.elasticsearch)
         homeDir = new File(baseDir, "elasticsearch-${nodeVersion}")
         pathConf = new File(homeDir, 'config')
         if (config.dataDir != null) {
@@ -86,50 +86,23 @@ class RestIntegTestTask extends DefaultTask {
         runner.include('**/*IT.class')
         runner.systemProperty('tests.rest.load_packaged', 'false')

-        /*
-         * We use lazy-evaluated strings in order to configure system properties whose value will not be known until
-         * execution time (e.g. cluster port numbers). Adding these via the normal DSL doesn't work as these get treated
-         * as task inputs and therefore Gradle attempts to snapshot them before/after task execution. This fails due
-         * to the GStrings containing references to non-serializable objects.
-         *
-         * We bypass this by instead passing this system properties vi a CommandLineArgumentProvider. This has the added
-         * side-effect that these properties are NOT treated as inputs, therefore they don't influence things like the
-         * build cache key or up to date checking.
-         */
-        def nonInputProperties = new CommandLineArgumentProvider() {
-            private final Map<String, Object> systemProperties = [:]
-
-            void systemProperty(String key, Object value) {
-                systemProperties.put(key, value)
-            }
-
-            @Override
-            Iterable<String> asArguments() {
-                return systemProperties.collect { key, value ->
-                    "-D${key}=${value.toString()}".toString()
-                }
-            }
-        }
-        runner.jvmArgumentProviders.add(nonInputProperties)
-        runner.ext.nonInputProperties = nonInputProperties
-
         if (System.getProperty("tests.rest.cluster") == null) {
             if (System.getProperty("tests.cluster") != null) {
                 throw new IllegalArgumentException("tests.rest.cluster and tests.cluster must both be null or non-null")
             }
             if (usesTestclusters == true) {
                 ElasticsearchCluster cluster = project.testClusters."${name}"
-                nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",") }")
-                nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI }")
+                runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> cluster.allHttpSocketURI.join(",") }")
+                runner.nonInputProperties.systemProperty('tests.cluster', "${-> cluster.transportPortURI }")
             } else {
                 // we pass all nodes to the rest cluster to allow the clients to round-robin between them
                 // this is more realistic than just talking to a single node
-                nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}")
-                nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}")
+                runner.nonInputProperties.systemProperty('tests.rest.cluster', "${-> nodes.collect { it.httpUri() }.join(",")}")
+                runner.nonInputProperties.systemProperty('tests.config.dir', "${-> nodes[0].pathConf}")
                 // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
                 // that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
                 // both as separate sysprops
-                nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
+                runner.nonInputProperties.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")

         // dump errors and warnings from cluster log on failure
         TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
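The CommandLineArgumentProvider removed here (its callers now go through runner.nonInputProperties) is worth seeing in isolation. A self-contained sketch of the same technique, with illustrative task and property names:

import org.gradle.process.CommandLineArgumentProvider

// minimal standalone version of the provider described in the removed comment above
def lazyProps = new CommandLineArgumentProvider() {
    private final Map<String, Object> systemProperties = [:]

    void systemProperty(String key, Object value) {
        systemProperties.put(key, value)
    }

    @Override
    Iterable<String> asArguments() {
        // evaluated only when the test JVM is forked, so late-bound GStrings work here
        return systemProperties.collect { key, value -> "-D${key}=${value.toString()}".toString() }
    }
}

// in a project with the 'java' plugin applied
tasks.named('test') {
    jvmArgumentProviders.add(lazyProps)
    // "${-> ...}" defers evaluation until asArguments() runs at execution time,
    // and the value is not treated as a task input (no snapshotting, no cache-key impact)
    lazyProps.systemProperty('tests.cluster.port', "${-> project.ext.clusterPort}")
}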
@@ -276,7 +249,7 @@ class RestIntegTestTask extends DefaultTask {
             restSpec
         }
         project.dependencies {
-            restSpec "org.elasticsearch:rest-api-spec:${VersionProperties.elasticsearch}"
+            restSpec project.project(':rest-api-spec')
         }
         Task copyRestSpec = project.tasks.findByName('copyRestSpec')
         if (copyRestSpec != null) {
@@ -27,11 +27,14 @@ import org.elasticsearch.gradle.ExportElasticsearchBuildResourcesTask
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.precommit.PrecommitTasks
 import org.gradle.api.InvalidUserDataException
+import org.gradle.api.JavaVersion
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.artifacts.Configuration
+import org.gradle.api.plugins.ExtraPropertiesExtension
 import org.gradle.api.plugins.JavaBasePlugin
 import org.gradle.api.plugins.JavaPlugin
+import org.gradle.api.plugins.JavaPluginExtension
 import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.SourceSetContainer
 import org.gradle.api.tasks.compile.JavaCompile
@@ -57,11 +60,14 @@ class StandaloneRestTestPlugin implements Plugin<Project> {
         project.pluginManager.apply(JavaBasePlugin)

         project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
-        BuildPlugin.globalBuildInfo(project)
         BuildPlugin.configureRepositories(project)
         BuildPlugin.configureTestTasks(project)
         BuildPlugin.configureInputNormalization(project)
+
+        ExtraPropertiesExtension ext = project.extensions.getByType(ExtraPropertiesExtension)
+        project.extensions.getByType(JavaPluginExtension).sourceCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion
+        project.extensions.getByType(JavaPluginExtension).targetCompatibility = ext.get('minimumRuntimeVersion') as JavaVersion

         // only setup tests to build
         SourceSetContainer sourceSets = project.extensions.getByType(SourceSetContainer)
         SourceSet testSourceSet = sourceSets.create('test')
@@ -73,7 +79,7 @@ class StandaloneRestTestPlugin implements Plugin<Project> {

         // create a compileOnly configuration as others might expect it
         project.configurations.create("compileOnly")
-        project.dependencies.add('testCompile', "org.elasticsearch.test:framework:${VersionProperties.elasticsearch}")
+        project.dependencies.add('testCompile', project.project(':test:framework'))

         EclipseModel eclipse = project.extensions.getByType(EclipseModel)
         eclipse.classpath.sourceSets = [testSourceSet]
@ -0,0 +1,83 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import java.io.File;

import javax.inject.Inject;

import org.gradle.api.DefaultTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.TaskAction;
import org.gradle.internal.nativeintegration.filesystem.Chmod;

/**
 * Creates an empty directory.
 */
public class EmptyDirTask extends DefaultTask {

    private File dir;
    private int dirMode = 0755;

    /**
     * Creates an empty directory with the configured permissions.
     */
    @TaskAction
    public void create() {
        dir.mkdirs();
        getChmod().chmod(dir, dirMode);
    }

    @Inject
    public Chmod getChmod() {
        throw new UnsupportedOperationException();
    }

    @Input
    public File getDir() {
        return dir;
    }

    /**
     * @param dir The directory to create
     */
    public void setDir(File dir) {
        this.dir = dir;
    }

    /**
     * @param dir The path of the directory to create. Takes a String and coerces it to a file.
     */
    public void setDir(String dir) {
        this.dir = getProject().file(dir);
    }

    @Input
    public int getDirMode() {
        return dirMode;
    }

    /**
     * @param dirMode The permissions to apply to the new directory
     */
    public void setDirMode(int dirMode) {
        this.dirMode = dirMode;
    }

}

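For context, a build script or plugin would register the task above and point it at a directory; a minimal sketch in Java (the task name and path are illustrative, not taken from this commit):

    // Inside some Plugin<Project>.apply(Project project) - hypothetical registration.
    project.getTasks().create("emptyLogsDir", EmptyDirTask.class, task -> {
        task.setDir("build/logs");   // illustrative path, coerced to a File via Project.file()
        task.setDirMode(0750);       // octal mode applied through the injected Chmod service
    });
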
@ -23,6 +23,7 @@ import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;

@ -165,7 +166,12 @@ public class JdkDownloadPlugin implements Plugin<Project> {
        }
        String extractDir = rootProject.getBuildDir().toPath().resolve("jdks/openjdk-" + jdkVersion + "_" + platform).toString();
        TaskProvider<Copy> extractTask = rootProject.getTasks().register(extractTaskName, Copy.class, copyTask -> {
            copyTask.doFirst(t -> rootProject.delete(extractDir));
            copyTask.doFirst(new Action<Task>() {
                @Override
                public void execute(Task t) {
                    rootProject.delete(extractDir);
                }
            });
            copyTask.into(extractDir);
            copyTask.from(fileGetter, removeRootDir);
        });

@ -0,0 +1,276 @@
package org.elasticsearch.gradle.info;

import org.elasticsearch.gradle.OS;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.JavaVersion;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.TaskAction;
import org.gradle.internal.jvm.Jvm;
import org.gradle.process.ExecResult;

import javax.inject.Inject;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;

import static java.nio.charset.StandardCharsets.UTF_8;

@CacheableTask
public class GenerateGlobalBuildInfoTask extends DefaultTask {
    private JavaVersion minimumCompilerVersion;
    private JavaVersion minimumRuntimeVersion;
    private File compilerJavaHome;
    private File runtimeJavaHome;
    private List<JavaHome> javaVersions;
    private final RegularFileProperty outputFile;
    private final RegularFileProperty compilerVersionFile;
    private final RegularFileProperty runtimeVersionFile;
    private final RegularFileProperty fipsJvmFile;

    @Inject
    public GenerateGlobalBuildInfoTask(ObjectFactory objectFactory) {
        this.outputFile = objectFactory.fileProperty();
        this.compilerVersionFile = objectFactory.fileProperty();
        this.runtimeVersionFile = objectFactory.fileProperty();
        this.fipsJvmFile = objectFactory.fileProperty();
    }

    @Input
    public JavaVersion getMinimumCompilerVersion() {
        return minimumCompilerVersion;
    }

    public void setMinimumCompilerVersion(JavaVersion minimumCompilerVersion) {
        this.minimumCompilerVersion = minimumCompilerVersion;
    }

    @Input
    public JavaVersion getMinimumRuntimeVersion() {
        return minimumRuntimeVersion;
    }

    public void setMinimumRuntimeVersion(JavaVersion minimumRuntimeVersion) {
        this.minimumRuntimeVersion = minimumRuntimeVersion;
    }

    @InputDirectory
    @PathSensitive(PathSensitivity.RELATIVE)
    public File getCompilerJavaHome() {
        return compilerJavaHome;
    }

    public void setCompilerJavaHome(File compilerJavaHome) {
        this.compilerJavaHome = compilerJavaHome;
    }

    @InputDirectory
    @PathSensitive(PathSensitivity.RELATIVE)
    public File getRuntimeJavaHome() {
        return runtimeJavaHome;
    }

    public void setRuntimeJavaHome(File runtimeJavaHome) {
        this.runtimeJavaHome = runtimeJavaHome;
    }

    @Nested
    public List<JavaHome> getJavaVersions() {
        return javaVersions;
    }

    public void setJavaVersions(List<JavaHome> javaVersions) {
        this.javaVersions = javaVersions;
    }

    @OutputFile
    public RegularFileProperty getOutputFile() {
        return outputFile;
    }

    @OutputFile
    public RegularFileProperty getCompilerVersionFile() {
        return compilerVersionFile;
    }

    @OutputFile
    public RegularFileProperty getRuntimeVersionFile() {
        return runtimeVersionFile;
    }

    @OutputFile
    public RegularFileProperty getFipsJvmFile() {
        return fipsJvmFile;
    }

    @TaskAction
    public void generate() {
        String javaVendor = System.getProperty("java.vendor");
        String gradleJavaVersion = System.getProperty("java.version");
        String gradleJavaVersionDetails = javaVendor + " " + gradleJavaVersion + " [" + System.getProperty("java.vm.name")
            + " " + System.getProperty("java.vm.version") + "]";

        String compilerJavaVersionDetails = gradleJavaVersionDetails;
        JavaVersion compilerJavaVersionEnum = JavaVersion.current();
        String runtimeJavaVersionDetails = gradleJavaVersionDetails;
        JavaVersion runtimeJavaVersionEnum = JavaVersion.current();
        File gradleJavaHome = Jvm.current().getJavaHome();
        boolean inFipsJvm = false;

        try {
            if (Files.isSameFile(compilerJavaHome.toPath(), gradleJavaHome.toPath()) == false) {
                if (compilerJavaHome.exists()) {
                    compilerJavaVersionDetails = findJavaVersionDetails(compilerJavaHome);
                    compilerJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(compilerJavaHome));
                } else {
                    throw new RuntimeException("Compiler Java home path of '" + compilerJavaHome + "' does not exist");
                }
            }

            if (Files.isSameFile(runtimeJavaHome.toPath(), gradleJavaHome.toPath()) == false) {
                if (runtimeJavaHome.exists()) {
                    runtimeJavaVersionDetails = findJavaVersionDetails(runtimeJavaHome);
                    runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(runtimeJavaHome));

                    // We don't expect Gradle to be running in a FIPS JVM
                    String inFipsJvmScript = "print(java.security.Security.getProviders()[0].name.toLowerCase().contains(\"fips\"));";
                    inFipsJvm = Boolean.parseBoolean(runJavaAsScript(runtimeJavaHome, inFipsJvmScript));
                } else {
                    throw new RuntimeException("Runtime Java home path of '" + runtimeJavaHome + "' does not exist");
                }
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }

        try (BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile.getAsFile().get()))) {
            writer.write(" Gradle Version : " + getProject().getGradle().getGradleVersion() + "\n");
            writer.write(" OS Info : " + System.getProperty("os.name") + " " + System.getProperty("os.version")
                + " (" + System.getProperty("os.arch") + ")\n");
            if (gradleJavaVersionDetails.equals(compilerJavaVersionDetails) == false
                || gradleJavaVersionDetails.equals(runtimeJavaVersionDetails) == false) {
                writer.write(" Compiler JDK Version : " + compilerJavaVersionEnum + " (" + compilerJavaVersionDetails + ")\n");
                writer.write(" Compiler java.home : " + compilerJavaHome + "\n");
                writer.write(" Runtime JDK Version : " + runtimeJavaVersionEnum + " (" + runtimeJavaVersionDetails + ")\n");
                writer.write(" Runtime java.home : " + runtimeJavaHome + "\n");
                writer.write(" Gradle JDK Version : " + JavaVersion.toVersion(gradleJavaVersion)
                    + " (" + gradleJavaVersionDetails + ")\n");
                writer.write(" Gradle java.home : " + gradleJavaHome);
            } else {
                writer.write(" JDK Version : " + JavaVersion.toVersion(gradleJavaVersion)
                    + " (" + gradleJavaVersionDetails + ")\n");
                writer.write(" JAVA_HOME : " + gradleJavaHome);
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }

        // enforce Java version
        if (compilerJavaVersionEnum.compareTo(minimumCompilerVersion) < 0) {
            String message = "The compiler java.home must be set to a JDK installation directory for Java " + minimumCompilerVersion +
                " but is [" + compilerJavaHome + "] corresponding to [" + compilerJavaVersionEnum + "]";
            throw new GradleException(message);
        }

        if (runtimeJavaVersionEnum.compareTo(minimumRuntimeVersion) < 0) {
            String message = "The runtime java.home must be set to a JDK installation directory for Java " + minimumRuntimeVersion +
                " but is [" + runtimeJavaHome + "] corresponding to [" + runtimeJavaVersionEnum + "]";
            throw new GradleException(message);
        }

        for (JavaHome javaVersion : javaVersions) {
            File javaHome = javaVersion.getJavaHome();
            if (javaHome == null) {
                continue;
            }
            JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(javaHome));
            JavaVersion expectedJavaVersionEnum;
            int version = javaVersion.getVersion();
            if (version < 9) {
                expectedJavaVersionEnum = JavaVersion.toVersion("1." + version);
            } else {
                expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version));
            }
            if (javaVersionEnum != expectedJavaVersionEnum) {
                String message = "The environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java " +
                    expectedJavaVersionEnum + " but is [" + javaHome + "] corresponding to [" + javaVersionEnum + "]";
                throw new GradleException(message);
            }
        }

        writeToFile(compilerVersionFile.getAsFile().get(), compilerJavaVersionEnum.name());
        writeToFile(runtimeVersionFile.getAsFile().get(), runtimeJavaVersionEnum.name());
        writeToFile(fipsJvmFile.getAsFile().get(), Boolean.toString(inFipsJvm));
    }

    private void writeToFile(File file, String content) {
        try (Writer writer = new FileWriter(file)) {
            writer.write(content);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    /**
     * Finds printable java version of the given JAVA_HOME
     */
    private String findJavaVersionDetails(File javaHome) {
        String versionInfoScript = "print(" +
            "java.lang.System.getProperty(\"java.vendor\") + \" \" + java.lang.System.getProperty(\"java.version\") + " +
            "\" [\" + java.lang.System.getProperty(\"java.vm.name\") + \" \" + java.lang.System.getProperty(\"java.vm.version\") + \"]\");";
        return runJavaAsScript(javaHome, versionInfoScript).trim();
    }

    /**
     * Finds the parsable java specification version
     */
    private String findJavaSpecificationVersion(File javaHome) {
        String versionScript = "print(java.lang.System.getProperty(\"java.specification.version\"));";
        return runJavaAsScript(javaHome, versionScript);
    }

    /**
     * Runs the given JavaScript snippet using jrunscript from the JDK, and returns the output
     */
    private String runJavaAsScript(File javaHome, String script) {
        ByteArrayOutputStream stdout = new ByteArrayOutputStream();
        ByteArrayOutputStream stderr = new ByteArrayOutputStream();
        if (OS.current() == OS.WINDOWS) {
            // gradle/groovy does not properly escape the double quote for windows
            script = script.replace("\"", "\\\"");
        }
        File jrunscriptPath = new File(javaHome, "bin/jrunscript");
        String finalScript = script;
        ExecResult result = getProject().exec(spec -> {
            spec.setExecutable(jrunscriptPath);
            spec.args("-e", finalScript);
            spec.setStandardOutput(stdout);
            spec.setErrorOutput(stderr);
            spec.setIgnoreExitValue(true);
        });

        if (result.getExitValue() != 0) {
            getLogger().error("STDOUT:");
            Arrays.stream(stdout.toString(UTF_8).split(System.getProperty("line.separator"))).forEach(getLogger()::error);
            getLogger().error("STDERR:");
            Arrays.stream(stderr.toString(UTF_8).split(System.getProperty("line.separator"))).forEach(getLogger()::error);
            result.rethrowFailure();
        }
        return stdout.toString(UTF_8).trim();
    }
}

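For illustration, when the compiler, runtime, and Gradle JVMs all match, generate() writes the condensed branch of the report; only the labels below come from the code, the values are invented:

    Gradle Version : 5.4.1
    OS Info : Linux 4.15.0-50-generic (amd64)
    JDK Version : 12 (AdoptOpenJDK 12.0.1 [OpenJDK 64-Bit Server VM 12.0.1+12])
    JAVA_HOME : /usr/lib/jvm/adoptopenjdk-12
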
@ -0,0 +1,198 @@
package org.elasticsearch.gradle.info;

import org.elasticsearch.gradle.OS;
import org.gradle.api.GradleException;
import org.gradle.api.JavaVersion;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.internal.jvm.Jvm;

import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GlobalBuildInfoPlugin implements Plugin<Project> {
    private static final String GLOBAL_INFO_EXTENSION_NAME = "globalInfo";
    private static Integer _defaultParallel = null;

    @Override
    public void apply(Project project) {
        if (project != project.getRootProject()) {
            throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project.");
        }

        GlobalInfoExtension extension = project.getExtensions().create(GLOBAL_INFO_EXTENSION_NAME, GlobalInfoExtension.class);

        JavaVersion minimumCompilerVersion = JavaVersion.toVersion(getResourceContents("/minimumCompilerVersion"));
        JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(getResourceContents("/minimumRuntimeVersion"));

        File compilerJavaHome = findCompilerJavaHome();
        File runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome);

        final List<JavaHome> javaVersions = new ArrayList<>();
        for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.getMajorVersion()); version++) {
            if (System.getenv(getJavaHomeEnvVarName(Integer.toString(version))) != null) {
                javaVersions.add(JavaHome.of(version, new File(findJavaHome(Integer.toString(version)))));
            }
        }

        GenerateGlobalBuildInfoTask generateTask = project.getTasks().create("generateGlobalBuildInfo",
            GenerateGlobalBuildInfoTask.class, task -> {
                task.setJavaVersions(javaVersions);
                task.setMinimumCompilerVersion(minimumCompilerVersion);
                task.setMinimumRuntimeVersion(minimumRuntimeVersion);
                task.setCompilerJavaHome(compilerJavaHome);
                task.setRuntimeJavaHome(runtimeJavaHome);
                task.getOutputFile().set(new File(project.getBuildDir(), "global-build-info"));
                task.getCompilerVersionFile().set(new File(project.getBuildDir(), "java-compiler-version"));
                task.getRuntimeVersionFile().set(new File(project.getBuildDir(), "java-runtime-version"));
                task.getFipsJvmFile().set(new File(project.getBuildDir(), "in-fips-jvm"));
            });

        PrintGlobalBuildInfoTask printTask = project.getTasks().create("printGlobalBuildInfo", PrintGlobalBuildInfoTask.class, task -> {
            task.getBuildInfoFile().set(generateTask.getOutputFile());
            task.getCompilerVersionFile().set(generateTask.getCompilerVersionFile());
            task.getRuntimeVersionFile().set(generateTask.getRuntimeVersionFile());
            task.getFipsJvmFile().set(generateTask.getFipsJvmFile());
            task.setGlobalInfoListeners(extension.listeners);
        });

        project.getExtensions().getByType(ExtraPropertiesExtension.class).set("defaultParallel", findDefaultParallel(project));

        project.allprojects(p -> {
            // Make sure that any task execution generates and prints build info
            p.getTasks().all(task -> {
                if (task != generateTask && task != printTask) {
                    task.dependsOn(printTask);
                }
            });

            ExtraPropertiesExtension ext = p.getExtensions().getByType(ExtraPropertiesExtension.class);

            ext.set("compilerJavaHome", compilerJavaHome);
            ext.set("runtimeJavaHome", runtimeJavaHome);
            ext.set("isRuntimeJavaHomeSet", compilerJavaHome.equals(runtimeJavaHome) == false);
            ext.set("javaVersions", javaVersions);
            ext.set("minimumCompilerVersion", minimumCompilerVersion);
            ext.set("minimumRuntimeVersion", minimumRuntimeVersion);
            ext.set("gradleJavaVersion", Jvm.current().getJavaVersion());
        });
    }

    private static File findCompilerJavaHome() {
        String compilerJavaHome = System.getenv("JAVA_HOME");
        String compilerJavaProperty = System.getProperty("compiler.java");

        if (compilerJavaProperty != null) {
            compilerJavaHome = findJavaHome(compilerJavaProperty);
        }

        // if JAVA_HOME is not set, fall back to the JDK that Gradle was run with
        return compilerJavaHome == null ? Jvm.current().getJavaHome() : new File(compilerJavaHome);
    }

    private static File findRuntimeJavaHome(final File compilerJavaHome) {
        String runtimeJavaProperty = System.getProperty("runtime.java");

        if (runtimeJavaProperty != null) {
            return new File(findJavaHome(runtimeJavaProperty));
        }

        return System.getenv("RUNTIME_JAVA_HOME") == null ? compilerJavaHome : new File(System.getenv("RUNTIME_JAVA_HOME"));
    }

    private static String findJavaHome(String version) {
        String versionedJavaHome = System.getenv(getJavaHomeEnvVarName(version));
        if (versionedJavaHome == null) {
            throw new GradleException(
                getJavaHomeEnvVarName(version) + " must be set to build Elasticsearch. " +
                    "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
                    "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 for details."
            );
        }
        return versionedJavaHome;
    }

    private static String getJavaHomeEnvVarName(String version) {
        return "JAVA" + version + "_HOME";
    }

    private static String getResourceContents(String resourcePath) {
        try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath))
        )) {
            StringBuilder b = new StringBuilder();
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                if (b.length() != 0) {
                    b.append('\n');
                }
                b.append(line);
            }

            return b.toString();
        } catch (IOException e) {
            throw new UncheckedIOException("Error trying to read classpath resource: " + resourcePath, e);
        }
    }

    private static int findDefaultParallel(Project project) {
        // Since it costs IO to compute this, and is done at configuration time we want to cache this if possible
        // It's safe to store this in a static variable since it's just a primitive so leaking memory isn't an issue
        if (_defaultParallel == null) {
            File cpuInfoFile = new File("/proc/cpuinfo");
            if (cpuInfoFile.exists()) {
                // Count physical cores on any Linux distro (don't count hyper-threading)
                Map<String, Integer> socketToCore = new HashMap<>();
                String currentID = "";

                try (BufferedReader reader = new BufferedReader(new FileReader(cpuInfoFile))) {
                    for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                        if (line.contains(":")) {
                            List<String> parts = Arrays.stream(line.split(":", 2)).map(String::trim).collect(Collectors.toList());
                            String name = parts.get(0);
                            String value = parts.get(1);
                            // the ID of the CPU socket
                            if (name.equals("physical id")) {
                                currentID = value;
                            }
                            // Number of cores not including hyper-threading
                            if (name.equals("cpu cores")) {
                                assert currentID.isEmpty() == false;
                                socketToCore.put(currentID, Integer.valueOf(value));
                                currentID = "";
                            }
                        }
                    }
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
                _defaultParallel = socketToCore.values().stream().mapToInt(i -> i).sum();
            } else if (OS.current() == OS.MAC) {
                // Ask macOS to count physical CPUs for us
                ByteArrayOutputStream stdout = new ByteArrayOutputStream();
                project.exec(spec -> {
                    spec.setExecutable("sysctl");
                    spec.args("-n", "hw.physicalcpu");
                    spec.setStandardOutput(stdout);
                });

                _defaultParallel = Integer.parseInt(stdout.toString().trim());
            } else {
                _defaultParallel = Runtime.getRuntime().availableProcessors() / 2;
            }
        }

        return _defaultParallel;
    }
}

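To make the /proc/cpuinfo parsing in findDefaultParallel concrete: given a condensed excerpt like the one below (invented, with hyper-threaded sibling entries omitted), one core count is recorded per "physical id", so two sockets reporting 8 cores each yield a default parallelism of 16:

    physical id : 0
    cpu cores : 8
    physical id : 1
    cpu cores : 8
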
@ -0,0 +1,12 @@
package org.elasticsearch.gradle.info;

import java.util.ArrayList;
import java.util.List;

public class GlobalInfoExtension {
    final List<Runnable> listeners = new ArrayList<>();

    public void ready(Runnable block) {
        listeners.add(block);
    }
}

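A build script or plugin can use this extension to defer work until the build info has been generated and printed; a minimal sketch in Java (the log message is just an example):

    // The extension is registered as "globalInfo" by GlobalBuildInfoPlugin; the listener
    // runs from PrintGlobalBuildInfoTask.print() after the global properties are set.
    GlobalInfoExtension globalInfo = project.getExtensions().getByType(GlobalInfoExtension.class);
    globalInfo.ready(() -> project.getLogger().lifecycle("global build info is available"));
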
@ -0,0 +1,35 @@
package org.elasticsearch.gradle.info;

import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;

import java.io.File;

public class JavaHome {
    private Integer version;
    private File javaHome;

    private JavaHome(int version, File javaHome) {
        this.version = version;
        this.javaHome = javaHome;
    }

    public static JavaHome of(int version, File javaHome) {
        return new JavaHome(version, javaHome);
    }

    @Input
    public Integer getVersion() {
        return version;
    }

    @InputDirectory
    @Optional
    @PathSensitive(PathSensitivity.RELATIVE)
    public File getJavaHome() {
        return javaHome;
    }
}

@ -0,0 +1,84 @@
package org.elasticsearch.gradle.info;

import org.gradle.api.DefaultTask;
import org.gradle.api.JavaVersion;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.resources.TextResource;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.TaskAction;

import javax.inject.Inject;
import java.util.ArrayList;
import java.util.List;

public class PrintGlobalBuildInfoTask extends DefaultTask {
    private final RegularFileProperty buildInfoFile;
    private final RegularFileProperty compilerVersionFile;
    private final RegularFileProperty runtimeVersionFile;
    private final RegularFileProperty fipsJvmFile;
    private List<Runnable> globalInfoListeners = new ArrayList<>();

    @Inject
    public PrintGlobalBuildInfoTask(ObjectFactory objectFactory) {
        this.buildInfoFile = objectFactory.fileProperty();
        this.compilerVersionFile = objectFactory.fileProperty();
        this.runtimeVersionFile = objectFactory.fileProperty();
        this.fipsJvmFile = objectFactory.fileProperty();
    }

    @InputFile
    public RegularFileProperty getBuildInfoFile() {
        return buildInfoFile;
    }

    @InputFile
    public RegularFileProperty getCompilerVersionFile() {
        return compilerVersionFile;
    }

    @InputFile
    public RegularFileProperty getRuntimeVersionFile() {
        return runtimeVersionFile;
    }

    @InputFile
    public RegularFileProperty getFipsJvmFile() {
        return fipsJvmFile;
    }

    public void setGlobalInfoListeners(List<Runnable> globalInfoListeners) {
        this.globalInfoListeners = globalInfoListeners;
    }

    @TaskAction
    public void print() {
        getLogger().quiet("=======================================");
        getLogger().quiet("Elasticsearch Build Hamster says Hello!");
        getLogger().quiet(getFileText(getBuildInfoFile()).asString());
        getLogger().quiet(" Random Testing Seed : " + getProject().property("testSeed"));
        getLogger().quiet("=======================================");

        setGlobalProperties();
        globalInfoListeners.forEach(Runnable::run);

        // Since all tasks depend on this task, and it always runs for every build, this makes sure that lifecycle tasks will still
        // correctly report as UP-TO-DATE, since the convention is a lifecycle task (i.e. assemble, build, etc) will only be marked as
        // UP-TO-DATE if all upstream tasks were also UP-TO-DATE.
        setDidWork(false);
    }

    private TextResource getFileText(RegularFileProperty regularFileProperty) {
        return getProject().getResources().getText().fromFile(regularFileProperty.getAsFile().get());
    }

    private void setGlobalProperties() {
        getProject().getRootProject().allprojects(p -> {
            ExtraPropertiesExtension ext = p.getExtensions().getByType(ExtraPropertiesExtension.class);
            ext.set("compilerJavaVersion", JavaVersion.valueOf(getFileText(getCompilerVersionFile()).asString()));
            ext.set("runtimeJavaVersion", JavaVersion.valueOf(getFileText(getRuntimeVersionFile()).asString()));
            ext.set("inFipsJvm", Boolean.valueOf(getFileText(getFipsJvmFile()).asString()));
        });
    }
}

@ -0,0 +1,151 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.plugin;

import org.gradle.api.Project;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * A container for plugin properties that will be written to the plugin descriptor, for easy
 * manipulation in the gradle DSL.
 */
public class PluginPropertiesExtension {
    private String name;

    private String version;

    private String description;

    private String classname;

    /** Other plugins this plugin extends through SPI */
    private List<String> extendedPlugins = new ArrayList<>();

    private boolean hasNativeController;

    /** True if the plugin requires the elasticsearch keystore to exist, false otherwise. */
    private boolean requiresKeystore;

    /** A license file that should be included in the built plugin zip. */
    private File licenseFile;

    private boolean hasClientJar = false;

    /**
     * A notice file that should be included in the built plugin zip. This will be
     * extended with notices from the {@code licenses/} directory.
     */
    private File noticeFile;

    private final Project project;

    public PluginPropertiesExtension(Project project) {
        this.project = project;
    }

    public String getName() {
        return name == null ? project.getName() : name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getVersion() {
        return version == null ? project.getVersion().toString() : version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getClassname() {
        return classname;
    }

    public void setClassname(String classname) {
        this.classname = classname;
    }

    public List<String> getExtendedPlugins() {
        return this.extendedPlugins;
    }

    public boolean isHasNativeController() {
        return hasNativeController;
    }

    public void setHasNativeController(boolean hasNativeController) {
        this.hasNativeController = hasNativeController;
    }

    public boolean isRequiresKeystore() {
        return requiresKeystore;
    }

    public void setRequiresKeystore(boolean requiresKeystore) {
        this.requiresKeystore = requiresKeystore;
    }

    public File getLicenseFile() {
        return licenseFile;
    }

    public void setLicenseFile(File licenseFile) {
        this.project.getExtensions().getExtraProperties().set("licenseFile", licenseFile);
        this.licenseFile = licenseFile;
    }

    public File getNoticeFile() {
        return noticeFile;
    }

    public void setNoticeFile(File noticeFile) {
        this.project.getExtensions().getExtraProperties().set("noticeFile", noticeFile);
        this.noticeFile = noticeFile;
    }

    public Project getProject() {
        return project;
    }

    public void setExtendedPlugins(List<String> extendedPlugins) {
        this.extendedPlugins = extendedPlugins;
    }

    public boolean isHasClientJar() {
        return hasClientJar;
    }

    public void setHasClientJar(boolean hasClientJar) {
        this.hasClientJar = hasClientJar;
    }
}

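For reference, a plugin project would populate this extension before its descriptor is generated; a hedged sketch in Java (all values are invented):

    PluginPropertiesExtension pluginProps = project.getExtensions().getByType(PluginPropertiesExtension.class);
    pluginProps.setName("example-plugin");                    // falls back to project.getName() when unset
    pluginProps.setDescription("An example plugin descriptor");
    pluginProps.setClassname("org.example.ExamplePlugin");    // the plugin's main class
    pluginProps.setHasNativeController(false);
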
@ -0,0 +1,328 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle.precommit;

import org.apache.commons.codec.binary.Hex;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.TaskAction;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * A task to check licenses for dependencies.
 *
 * There are two parts to the check:
 * <ul>
 *   <li>LICENSE and NOTICE files</li>
 *   <li>SHA checksums for each dependency jar</li>
 * </ul>
 *
 * The directory to find the license and sha files in defaults to the dir {@code licenses}
 * in the project directory for this task. You can override this directory:
 * <pre>
 *   dependencyLicenses {
 *     licensesDir = getProject().file("mybetterlicensedir")
 *   }
 * </pre>
 *
 * The jar files to check default to the dependencies from the default configuration. You
 * can override this, for example, to only check compile dependencies:
 * <pre>
 *   dependencyLicenses {
 *     dependencies = getProject().configurations.compile
 *   }
 * </pre>
 *
 * Every jar must have a {@code .sha1} file in the licenses dir. These can be managed
 * automatically using the {@code updateShas} helper task that is created along
 * with this task. It will add {@code .sha1} files for new jars that are in dependencies
 * and remove old {@code .sha1} files that are no longer needed.
 *
 * Every jar must also have a LICENSE and NOTICE file. However, multiple jars can share
 * LICENSE and NOTICE files by mapping a pattern to the same name.
 * <pre>
 *   dependencyLicenses {
 *     mapping from: /lucene-.*/, to: "lucene"
 *   }
 * </pre>
 */
public class DependencyLicensesTask extends DefaultTask {

    private final Pattern regex = Pattern.compile("-v?\\d+.*");

    private final Logger logger = Logging.getLogger(getClass());

    private static final String SHA_EXTENSION = ".sha1";

    // TODO: we should be able to default this to eg compile deps, but we need to move the licenses
    // check from distribution to core (ie this should only be run on java projects)
    /** A collection of jar files that should be checked. */
    private FileCollection dependencies;

    /** The directory to find the license and sha files in. */
    private File licensesDir = new File(getProject().getProjectDir(), "licenses");

    /** A map of patterns to prefix, used to find the LICENSE and NOTICE file. */
    private Map<String, String> mappings = new LinkedHashMap<>();

    /** Names of dependencies whose shas should not exist. */
    private Set<String> ignoreShas = new HashSet<>();

    /**
     * Add a mapping from a regex pattern for the jar name, to a prefix to find
     * the LICENSE and NOTICE file for that jar.
     */
    public void mapping(Map<String, String> props) {
        String from = props.remove("from");
        if (from == null) {
            throw new InvalidUserDataException("Missing \"from\" setting for license name mapping");
        }
        String to = props.remove("to");
        if (to == null) {
            throw new InvalidUserDataException("Missing \"to\" setting for license name mapping");
        }
        if (props.isEmpty() == false) {
            throw new InvalidUserDataException("Unknown properties for mapping on dependencyLicenses: " + props.keySet());
        }
        mappings.put(from, to);
    }

    @InputFiles
    public FileCollection getDependencies() {
        return dependencies;
    }

    public void setDependencies(FileCollection dependencies) {
        this.dependencies = dependencies;
    }

    @Optional
    @InputDirectory
    public File getLicensesDir() {
        if (licensesDir.exists()) {
            return licensesDir;
        }

        return null;
    }

    public void setLicensesDir(File licensesDir) {
        this.licensesDir = licensesDir;
    }

    /**
     * Add a rule which will skip SHA checking for the given dependency name. This should be used for
     * locally built dependencies, which cause the sha to change constantly.
     */
    public void ignoreSha(String dep) {
        ignoreShas.add(dep);
    }

    @TaskAction
    public void checkDependencies() throws IOException, NoSuchAlgorithmException {
        if (dependencies == null) {
            throw new GradleException("No dependencies variable defined.");
        }

        if (dependencies.isEmpty()) {
            if (licensesDir.exists()) {
                throw new GradleException("Licenses dir " + licensesDir + " exists, but there are no dependencies");
            }
            return; // no dependencies to check
        } else if (licensesDir.exists() == false) {
throw new GradleException("Licences dir " + licensesDir + " does not exist, but there are dependencies");
|
||||
        }

        Map<String, Boolean> licenses = new HashMap<>();
        Map<String, Boolean> notices = new HashMap<>();
        Set<File> shaFiles = new HashSet<>();

        for (File file : licensesDir.listFiles()) {
            String name = file.getName();
            if (name.endsWith(SHA_EXTENSION)) {
                shaFiles.add(file);
            } else if (name.endsWith("-LICENSE") || name.endsWith("-LICENSE.txt")) {
                // TODO: why do we support suffix of LICENSE *and* LICENSE.txt??
                licenses.put(name, false);
            } else if (name.contains("-NOTICE") || name.contains("-NOTICE.txt")) {
                notices.put(name, false);
            }
        }

        checkDependencies(licenses, notices, shaFiles);

        licenses.forEach((item, exists) -> failIfAnyMissing(item, exists, "license"));

        notices.forEach((item, exists) -> failIfAnyMissing(item, exists, "notice"));

        if (shaFiles.isEmpty() == false) {
            throw new GradleException("Unused sha files found: \n" + joinFilenames(shaFiles));
        }
    }

    private void failIfAnyMissing(String item, Boolean exists, String type) {
        if (exists == false) {
            throw new GradleException("Unused " + type + " " + item);
        }
    }

    private void checkDependencies(Map<String, Boolean> licenses, Map<String, Boolean> notices, Set<File> shaFiles)
        throws NoSuchAlgorithmException, IOException {
        for (File dependency : dependencies) {
            String jarName = dependency.getName();
            String depName = regex.matcher(jarName).replaceFirst("");

            validateSha(shaFiles, dependency, jarName, depName);

            String dependencyName = getDependencyName(mappings, depName);
            logger.info("mapped dependency name {} to {} for license/notice check", depName, dependencyName);
            checkFile(dependencyName, jarName, licenses, "LICENSE");
            checkFile(dependencyName, jarName, notices, "NOTICE");
        }
    }

    private void validateSha(Set<File> shaFiles, File dependency, String jarName, String depName)
        throws NoSuchAlgorithmException, IOException {
        if (ignoreShas.contains(depName)) {
            // local deps should not have sha files!
            if (getShaFile(jarName).exists()) {
                throw new GradleException("SHA file " + getShaFile(jarName) + " exists for ignored dependency " + depName);
            }
        } else {
            logger.info("Checking sha for {}", jarName);
            checkSha(dependency, jarName, shaFiles);
        }
    }

    private String joinFilenames(Set<File> shaFiles) {
        List<String> names = shaFiles.stream().map(File::getName).collect(Collectors.toList());
        return String.join("\n", names);
    }

    public static String getDependencyName(Map<String, String> mappings, String dependencyName) {
        // order is the same for keys and values iteration since we use a linked hashmap
        List<String> mapped = new ArrayList<>(mappings.values());
        Pattern mappingsPattern = Pattern.compile("(" + String.join(")|(", mappings.keySet()) + ")");
        Matcher match = mappingsPattern.matcher(dependencyName);
        if (match.matches()) {
            int i = 0;
            while (i < match.groupCount() && match.group(i + 1) == null) {
                ++i;
            }
            return mapped.get(i);
        }
        return dependencyName;
    }

    private void checkSha(File jar, String jarName, Set<File> shaFiles) throws NoSuchAlgorithmException, IOException {
        File shaFile = getShaFile(jarName);
        if (shaFile.exists() == false) {
            throw new GradleException("Missing SHA for " + jarName + ". Run \"gradle updateSHAs\" to create them");
        }

        // TODO: shouldn't have to trim, sha files should not have trailing newline
        byte[] fileBytes = Files.readAllBytes(shaFile.toPath());
        String expectedSha = new String(fileBytes, StandardCharsets.UTF_8).trim();

        String sha = getSha1(jar);

        if (expectedSha.equals(sha) == false) {
            throw new GradleException(
                "SHA has changed! Expected " + expectedSha + " for " + jarName + " but got " + sha + ". " +
                    "\nThis usually indicates a corrupt dependency cache or artifacts changed upstream." +
                    "\nEither wipe your cache, fix the upstream artifact, or delete " + shaFile + " and run updateShas");
        }
        shaFiles.remove(shaFile);
    }

    private void checkFile(String name, String jarName, Map<String, Boolean> counters, String type) {
        String fileName = getFileName(name, counters, type);

        if (counters.containsKey(fileName) == false) {
            throw new GradleException("Missing " + type + " for " + jarName + ", expected in " + fileName);
        }

        counters.put(fileName, true);
    }

    private String getFileName(String name, Map<String, ?> counters, String type) {
        String fileName = name + "-" + type;

        if (counters.containsKey(fileName) == false) {
            // try the other suffix...TODO: get rid of this, just support ending in .txt
            return fileName + ".txt";
        }

        return fileName;
    }

    @Input
    public LinkedHashMap<String, String> getMappings() {
        return new LinkedHashMap<>(mappings);
    }

    File getShaFile(String jarName) {
        return new File(licensesDir, jarName + SHA_EXTENSION);
    }

    Set<File> getShaFiles() {
        File[] array = licensesDir.listFiles();
        if (array == null) {
            throw new GradleException("\"" + licensesDir.getPath() + "\" isn't a valid directory");
        }

        return Arrays.stream(array)
            .filter(file -> file.getName().endsWith(SHA_EXTENSION))
            .collect(Collectors.toSet());
    }

    String getSha1(File file) throws IOException, NoSuchAlgorithmException {
        byte[] bytes = Files.readAllBytes(file.toPath());

        MessageDigest digest = MessageDigest.getInstance("SHA-1");
        char[] encoded = Hex.encodeHex(digest.digest(bytes));
        return String.copyValueOf(encoded);
    }

}

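To make the name mapping above concrete: a jar name is first stripped of its version suffix by the "-v?\d+.*" regex, and the result is then matched against the configured mappings; a small sketch in Java (jar names are illustrative, and the imports from the class above are assumed):

    Map<String, String> mappings = new LinkedHashMap<>();
    mappings.put("lucene-.*", "lucene");
    // "lucene-core-8.0.0.jar" -> depName "lucene-core" -> prefix "lucene", so a single
    // lucene-LICENSE.txt and lucene-NOTICE.txt pair can cover every lucene jar.
    String prefix = DependencyLicensesTask.getDependencyName(mappings, "lucene-core");
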
@ -21,19 +21,20 @@ package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.LoggedExec;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.CompileClasspath;
import org.gradle.api.tasks.TaskAction;

import java.io.File;

/**
 * Runs CheckJarHell on a classpath.
 */
@CacheableTask
public class JarHellTask extends PrecommitTask {

    private FileCollection classpath;

    private Object javaHome;

    public JarHellTask() {
        setDescription("Runs CheckJarHell on the configured classpath");
    }

@ -42,23 +43,15 @@ public class JarHellTask extends PrecommitTask {
    public void runJarHellCheck() {
        LoggedExec.javaexec(getProject(), spec -> {
            spec.classpath(getClasspath());
            spec.executable(getJavaHome() + "/bin/java");
            spec.setMain("org.elasticsearch.bootstrap.JarHell");
        });
    }

    @Input
    public Object getJavaHome() {
        return javaHome;
    }

    public void setJavaHome(Object javaHome) {
        this.javaHome = javaHome;
    }

    @Classpath
    // We use compile classpath normalization here because class implementation changes are irrelevant for the purposes of jar hell.
    // We only care about the runtime classpath ABI here.
    @CompileClasspath
    public FileCollection getClasspath() {
        return classpath.filter(file -> file.exists());
        return classpath.filter(File::exists);
    }

    public void setClasspath(FileCollection classpath) {

@ -26,6 +26,7 @@ import org.gradle.api.JavaVersion;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.file.FileTree;
import org.gradle.api.provider.Property;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Classpath;

@ -79,17 +80,13 @@ public class ThirdPartyAuditTask extends DefaultTask {

    private String javaHome;

    private JavaVersion targetCompatibility;
    private final Property<JavaVersion> targetCompatibility = getProject().getObjects().property(JavaVersion.class);

    @Input
    public JavaVersion getTargetCompatibility() {
    public Property<JavaVersion> getTargetCompatibility() {
        return targetCompatibility;
    }

    public void setTargetCompatibility(JavaVersion targetCompatibility) {
        this.targetCompatibility = targetCompatibility;
    }

    @InputFiles
    @PathSensitive(PathSensitivity.NAME_ONLY)
    public Configuration getForbiddenAPIsConfiguration() {

@ -287,7 +284,7 @@ public class ThirdPartyAuditTask extends DefaultTask {
        // other version specific implementation of said classes.
        IntStream.rangeClosed(
            Integer.parseInt(JavaVersion.VERSION_1_9.getMajorVersion()),
            Integer.parseInt(targetCompatibility.getMajorVersion())
            Integer.parseInt(targetCompatibility.get().getMajorVersion())
        ).forEach(majorVersion -> getProject().copy(spec -> {
            spec.from(getProject().zipTree(jar));
            spec.into(jarExpandDir);

@ -0,0 +1,86 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.precommit;

import org.gradle.api.DefaultTask;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.TaskAction;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.security.NoSuchAlgorithmException;
import java.util.Set;

/**
 * A task to update shas used by {@code DependencyLicensesTask}
 */
public class UpdateShasTask extends DefaultTask {

    private final Logger logger = Logging.getLogger(getClass());

    /** The parent dependency licenses task to use configuration from */
    private DependencyLicensesTask parentTask;

    public UpdateShasTask() {
        setDescription("Updates the sha files for the dependencyLicenses check");
        setOnlyIf(element -> parentTask.getLicensesDir() != null);
    }

    @TaskAction
    public void updateShas() throws NoSuchAlgorithmException, IOException {
        Set<File> shaFiles = parentTask.getShaFiles();

        for (File dependency : parentTask.getDependencies()) {
            String jarName = dependency.getName();
            File shaFile = parentTask.getShaFile(jarName);

            if (shaFile.exists() == false) {
                createSha(dependency, jarName, shaFile);
            } else {
                shaFiles.remove(shaFile);
            }
        }

        for (File shaFile : shaFiles) {
            logger.lifecycle("Removing unused sha " + shaFile.getName());
            shaFile.delete();
        }
    }

    private void createSha(File dependency, String jarName, File shaFile) throws IOException, NoSuchAlgorithmException {
        logger.lifecycle("Adding sha for " + jarName);

        String sha = parentTask.getSha1(dependency);

        Files.write(shaFile.toPath(), sha.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
    }

    public DependencyLicensesTask getParentTask() {
        return parentTask;
    }

    public void setParentTask(DependencyLicensesTask parentTask) {
        this.parentTask = parentTask;
    }
}

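As the DependencyLicensesTask javadoc notes, this helper is created alongside the licenses check and configured from it; a minimal wiring sketch in Java (the registration site is illustrative):

    DependencyLicensesTask dependencyLicenses =
        project.getTasks().create("dependencyLicenses", DependencyLicensesTask.class);
    UpdateShasTask updateShas = project.getTasks().create("updateShas", UpdateShasTask.class);
    updateShas.setParentTask(dependencyLicenses);   // reuses the check's licensesDir and dependencies
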
@ -360,25 +360,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {

    private void installModules() {
        if (distribution == Distribution.INTEG_TEST) {
            modules.forEach(module -> services.copy(spec -> {
                if (module.getName().toLowerCase().endsWith(".zip")) {
                    spec.from(services.zipTree(module));
                } else if (module.isDirectory()) {
                    spec.from(module);
                } else {
                    throw new IllegalArgumentException("Not a valid module " + module + " for " + this);
            for (File module : modules) {
                Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "").replace("-" + version, ""));

                // only install modules that are not already bundled with the integ-test distribution
                if (Files.exists(destination) == false) {
                    services.copy(spec -> {
                        if (module.getName().toLowerCase().endsWith(".zip")) {
                            spec.from(services.zipTree(module));
                        } else if (module.isDirectory()) {
                            spec.from(module);
                        } else {
                            throw new IllegalArgumentException("Not a valid module " + module + " for " + this);
                        }
                        spec.into(destination);
                    });
                }
                spec.into(
                    workingDir
                        .resolve("modules")
                        .resolve(
                            module.getName()
                                .replace(".zip", "")
                                .replace("-" + version, "")
                        )
                        .toFile()
                );
            }));
            }
        } else {
            LOGGER.info("Not installing " + modules.size() + " module(s) since the " + distribution + " distribution already " +
                "has them");

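The module destination above is derived by stripping the archive suffix and the version from the file name; a tiny Java sketch of the same transformation (the module name and version are invented):

    String version = "7.3.0";
    // "x-pack-ccr-7.3.0.zip" -> "x-pack-ccr", so the module lands in modules/x-pack-ccr
    String dirName = "x-pack-ccr-7.3.0.zip".replace(".zip", "").replace("-" + version, "");
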
@ -411,6 +409,13 @@ public class ElasticsearchNode implements TestClusterConfiguration {
    }

    private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
        if (
            Files.exists(workingDir.resolve("bin").resolve(tool)) == false &&
            Files.exists(workingDir.resolve("bin").resolve(tool + ".bat")) == false
        ) {
            throw new TestClustersException("Can't run bin script: `" + tool + "` does not exist. " +
                "Is this the distribution you expect it to be?");
        }
        try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
            services.loggedExec(spec -> {
                spec.setEnvironment(getESEnvironment());

@ -116,11 +116,7 @@ public interface TestClusterConfiguration {
            } catch (TestClustersException e) {
                throw e;
            } catch (Exception e) {
                if (lastException == null) {
                    lastException = e;
                } else {
                    lastException = e;
                }
                throw e;
            }
        }
        if (conditionMet == false) {

@ -129,7 +125,7 @@ public interface TestClusterConfiguration {
            if (lastException == null) {
                throw new TestClustersException(message);
            } else {
                throw new TestClustersException(message, lastException);
                throw new TestClustersException(message + message, lastException);
            }
        }
        logger.info(

@ -34,6 +34,7 @@ import org.gradle.api.plugins.ExtraPropertiesExtension;
|
|||
import org.gradle.api.tasks.TaskContainer;
|
||||
import org.gradle.api.tasks.testing.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.Collections;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
|
@@ -56,46 +57,47 @@ public class TestFixturesPlugin implements Plugin<Project> {
        disableTaskByType(tasks, ThirdPartyAuditTask.class);
        disableTaskByType(tasks, JarHellTask.class);

        // the project that defined a test fixture can also use it
        extension.fixtures.add(project);

        Task buildFixture = project.getTasks().create("buildFixture");
        Task pullFixture = project.getTasks().create("pullFixture");
        Task preProcessFixture = project.getTasks().create("preProcessFixture");
        buildFixture.dependsOn(preProcessFixture);
        pullFixture.dependsOn(preProcessFixture);
        Task postProcessFixture = project.getTasks().create("postProcessFixture");
        postProcessFixture.dependsOn(buildFixture);
        preProcessFixture.onlyIf(spec -> buildFixture.getEnabled());
        postProcessFixture.onlyIf(spec -> buildFixture.getEnabled());

        if (dockerComposeSupported(project) == false) {
        if (dockerComposeSupported() == false) {
            preProcessFixture.setEnabled(false);
            postProcessFixture.setEnabled(false);
            buildFixture.setEnabled(false);
            pullFixture.setEnabled(false);
            return;
        } else {
            project.apply(spec -> spec.plugin(BasePlugin.class));
            project.apply(spec -> spec.plugin(DockerComposePlugin.class));
            ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class);
            composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML));
            composeExtension.setRemoveContainers(true);
            composeExtension.setExecutable(
                project.file("/usr/local/bin/docker-compose").exists() ?
                    "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
            );

            buildFixture.dependsOn(tasks.getByName("composeUp"));
            pullFixture.dependsOn(tasks.getByName("composePull"));
            tasks.getByName("composeUp").mustRunAfter(preProcessFixture);
            tasks.getByName("composePull").mustRunAfter(preProcessFixture);

            configureServiceInfoForTask(
                postProcessFixture,
                project,
                (name, port) -> postProcessFixture.getExtensions()
                    .getByType(ExtraPropertiesExtension.class).set(name, port)
            );
        }
        preProcessFixture.onlyIf(spec -> buildFixture.getEnabled());
        postProcessFixture.onlyIf(spec -> buildFixture.getEnabled());

        project.apply(spec -> spec.plugin(BasePlugin.class));
        project.apply(spec -> spec.plugin(DockerComposePlugin.class));
        ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class);
        composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML));
        composeExtension.setRemoveContainers(true);
        composeExtension.setExecutable(
            project.file("/usr/local/bin/docker-compose").exists() ?
                "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose"
        );

        buildFixture.dependsOn(tasks.getByName("composeUp"));
        pullFixture.dependsOn(tasks.getByName("composePull"));
        tasks.getByName("composeUp").mustRunAfter(preProcessFixture);
        tasks.getByName("composePull").mustRunAfter(preProcessFixture);
        postProcessFixture.dependsOn(buildFixture);

        configureServiceInfoForTask(
            postProcessFixture,
            project,
            (name, port) -> postProcessFixture.getExtensions()
                .getByType(ExtraPropertiesExtension.class).set(name, port)
        );
        extension.fixtures.add(project);
    }

    extension.fixtures
@@ -107,7 +109,7 @@ public class TestFixturesPlugin implements Plugin<Project> {
        conditionTaskByType(tasks, extension, TestingConventionsTasks.class);
        conditionTaskByType(tasks, extension, ComposeUp.class);

        if (dockerComposeSupported(project) == false) {
        if (dockerComposeSupported() == false) {
            project.getLogger().warn(
                "Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " +
                    "but none could be found so these will be skipped", project.getPath()
@@ -135,7 +137,9 @@ public class TestFixturesPlugin implements Plugin<Project> {
            taskClass,
            task -> task.onlyIf(spec ->
                extension.fixtures.stream()
                    .anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false
                    .anyMatch(fixtureProject ->
                        fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false
                    ) == false
            )
        );
    }
@@ -168,12 +172,12 @@ public class TestFixturesPlugin implements Plugin<Project> {
        );
    }

    public boolean dockerComposeSupported(Project project) {
    public static boolean dockerComposeSupported() {
        if (OS.current().equals(OS.WINDOWS)) {
            return false;
        }
        final boolean hasDockerCompose = project.file("/usr/local/bin/docker-compose").exists() ||
            project.file("/usr/bin/docker-compose").exists();
        final boolean hasDockerCompose = (new File("/usr/local/bin/docker-compose")).exists() ||
            (new File("/usr/bin/docker-compose").exists());
        return hasDockerCompose && Boolean.parseBoolean(System.getProperty("tests.fixture.enabled", "true"));
    }
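Because dockerComposeSupported() is now static, other build logic can guard docker-backed tasks without holding a Project reference; a sketch (the task name is an example, not from this commit):

    // assumption: "integTest" is an example task name for a docker-backed test task
    project.getTasks().named("integTest").configure(task ->
        task.onlyIf(spec -> TestFixturesPlugin.dockerComposeSupported())
    );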
@@ -0,0 +1,23 @@
package org.elasticsearch.gradle.tool;

public class ClasspathUtils {
    private static boolean isElasticsearchProject;

    static {
        // look for buildSrc marker file, if it exists then we are running in the context of the elastic/elasticsearch build
        isElasticsearchProject = ClasspathUtils.class.getResource("/buildSrc.marker") != null;
    }

    private ClasspathUtils() {
    }

    /**
     * Determine if we are running in the context of the `elastic/elasticsearch` project. This method will return {@code false} when
     * the build-tools project is pulled in as an external dependency.
     *
     * @return if we are currently running in the `elastic/elasticsearch` project
     */
    public static boolean isElasticsearchProject() {
        return isElasticsearchProject;
    }
}
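A sketch of how callers might branch on the marker check above; the dependency notations illustrate the two resolution modes and are not code from this commit:

    // resolve against the local project when running inside elastic/elasticsearch,
    // fall back to external coordinates when build-tools is consumed externally
    Object serverDependency = ClasspathUtils.isElasticsearchProject()
        ? project.project(":server")
        : "org.elasticsearch:elasticsearch:" + version;
    project.getDependencies().add("compile", serverDependency);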
@@ -0,0 +1 @@
implementation-class=org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
@@ -1,20 +0,0 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

implementation-class=org.elasticsearch.gradle.test.MessyTestPlugin
@@ -0,0 +1,78 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.gradle;

import java.io.File;
import java.io.IOException;

import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;

public class EmptyDirTaskTests extends GradleUnitTestCase {

    public void testCreateEmptyDir() throws Exception {
        Project project = ProjectBuilder.builder().build();
        EmptyDirTask emptyDirTask = project.getTasks().create("emptyDirTask", EmptyDirTask.class);
        assertEquals(0755, emptyDirTask.getDirMode());

        // generate a new temporary folder and make sure it does not exist
        File newEmptyFolder = getNewNonExistingTempFolderFile(project);

        emptyDirTask.setDir(newEmptyFolder);
        emptyDirTask.create();

        assertTrue(newEmptyFolder.exists());
        assertTrue(newEmptyFolder.isDirectory());
        assertTrue(newEmptyFolder.canExecute());
        assertTrue(newEmptyFolder.canRead());
        assertTrue(newEmptyFolder.canWrite());

        // cleanup
        newEmptyFolder.delete();
    }

    public void testCreateEmptyDirNoPermissions() throws Exception {
        Project project = ProjectBuilder.builder().build();
        EmptyDirTask emptyDirTask = project.getTasks().create("emptyDirTask", EmptyDirTask.class);
        emptyDirTask.setDirMode(0000);

        // generate a new temporary folder and make sure it does not exist
        File newEmptyFolder = getNewNonExistingTempFolderFile(project);

        emptyDirTask.setDir(newEmptyFolder);
        emptyDirTask.create();

        assertTrue(newEmptyFolder.exists());
        assertTrue(newEmptyFolder.isDirectory());
        assertFalse(newEmptyFolder.canExecute());
        assertFalse(newEmptyFolder.canRead());
        assertFalse(newEmptyFolder.canWrite());

        // cleanup
        newEmptyFolder.delete();
    }

    private File getNewNonExistingTempFolderFile(Project project) throws IOException {
        File newEmptyFolder = new File(project.getBuildDir(), "empty-dir");
        assertFalse(newEmptyFolder.exists());
        return newEmptyFolder;
    }

}
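For reference, the API these tests exercise can be wired into a build roughly like this; the task name, directory, and mode are illustrative assumptions:

    // create an empty placeholder directory with explicit permissions
    EmptyDirTask emptyLogsDir = project.getTasks().create("createEmptyLogsDir", EmptyDirTask.class);
    emptyLogsDir.setDir(new File(project.getBuildDir(), "empty-logs"));
    emptyLogsDir.setDirMode(0755);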
@@ -0,0 +1,58 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle.plugin;

import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.Project;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.testfixtures.ProjectBuilder;

public class PluginPropertiesExtensionTests extends GradleUnitTestCase {

    public void testCreatingPluginPropertiesExtensionWithNameAndVersion() {
        String projectName = "Test";
        String projectVersion = "5.0";

        PluginPropertiesExtension pluginPropertiesExtension =
            new PluginPropertiesExtension(this.createProject(projectName, projectVersion));

        assertEquals(projectName, pluginPropertiesExtension.getName());
        assertEquals(projectVersion, pluginPropertiesExtension.getVersion());
    }

    public void testCreatingPluginPropertiesExtensionWithNameWithoutVersion() {
        String projectName = "Test";

        PluginPropertiesExtension pluginPropertiesExtension =
            new PluginPropertiesExtension(this.createProject(projectName, null));

        assertEquals(projectName, pluginPropertiesExtension.getName());
        assertEquals("unspecified", pluginPropertiesExtension.getVersion());
    }

    private Project createProject(String projectName, String version) {
        Project project = ProjectBuilder.builder().withName(projectName).build();
        project.setVersion(version);

        project.getPlugins().apply(JavaPlugin.class);

        return project;
    }
}
@@ -0,0 +1,268 @@
package org.elasticsearch.gradle.precommit;

import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.CoreMatchers.containsString;

public class DependencyLicensesTaskTests extends GradleUnitTestCase {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    private UpdateShasTask updateShas;

    private DependencyLicensesTask task;

    private Project project;

    private Dependency dependency;

    @Before
    public void prepare() {
        project = createProject();
        task = createDependencyLicensesTask(project);
        updateShas = createUpdateShasTask(project, task);
        dependency = project.getDependencies().localGroovy();
    }

    @Test
    public void givenProjectWithLicensesDirButNoDependenciesThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("exists, but there are no dependencies"));

        getLicensesDir(project).mkdir();
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithoutLicensesDirButWithDependenciesThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("does not exist, but there are dependencies"));

        project.getDependencies().add("compile", dependency);
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithoutLicensesDirNorDependenciesThenShouldReturnSilently() throws Exception {
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithDependencyButNoShaFileThenShouldReturnException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Missing SHA for "));

        File licensesDir = getLicensesDir(project);
        createFileIn(licensesDir, "groovy-all-LICENSE.txt", "");
        createFileIn(licensesDir, "groovy-all-NOTICE.txt", "");

        project.getDependencies().add("compile", project.getDependencies().localGroovy());
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithDependencyButNoLicenseFileThenShouldReturnException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Missing LICENSE for "));

        project.getDependencies().add("compile", project.getDependencies().localGroovy());

        getLicensesDir(project).mkdir();
        updateShas.updateShas();
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithDependencyButNoNoticeFileThenShouldReturnException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Missing NOTICE for "));

        project.getDependencies().add("compile", dependency);

        createFileIn(getLicensesDir(project), "groovy-all-LICENSE.txt", "");

        updateShas.updateShas();
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithDependencyAndEverythingInOrderThenShouldReturnSilently() throws Exception {
        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);

        createAllDefaultDependencyFiles(licensesDir, "groovy-all");
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithALicenseButWithoutTheDependencyThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Unused license "));

        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createAllDefaultDependencyFiles(licensesDir, "groovy-all");
        createFileIn(licensesDir, "non-declared-LICENSE.txt", "");

        task.checkDependencies();
    }

    @Test
    public void givenProjectWithANoticeButWithoutTheDependencyThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Unused notice "));

        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createAllDefaultDependencyFiles(licensesDir, "groovy-all");
        createFileIn(licensesDir, "non-declared-NOTICE.txt", "");

        task.checkDependencies();
    }

    @Test
    public void givenProjectWithAShaButWithoutTheDependencyThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("Unused sha files found: \n"));

        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createAllDefaultDependencyFiles(licensesDir, "groovy-all");
        createFileIn(licensesDir, "non-declared.sha1", "");

        task.checkDependencies();
    }

    @Test
    public void givenProjectWithADependencyWithWrongShaThenShouldThrowException() throws Exception {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("SHA has changed! Expected "));

        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createAllDefaultDependencyFiles(licensesDir, "groovy-all");

        Path groovySha = Files
            .list(licensesDir.toPath())
            .filter(file -> file.toFile().getName().contains("sha"))
            .findFirst().get();

        Files.write(groovySha, new byte[] { 1 }, StandardOpenOption.CREATE);

        task.checkDependencies();
    }

    @Test
    public void givenProjectWithADependencyMappingThenShouldReturnSilently() throws Exception {
        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createAllDefaultDependencyFiles(licensesDir, "groovy");

        Map<String, String> mappings = new HashMap<>();
        mappings.put("from", "groovy-all");
        mappings.put("to", "groovy");

        task.mapping(mappings);
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithAIgnoreShaConfigurationAndNoShaFileThenShouldReturnSilently() throws Exception {
        project.getDependencies().add("compile", dependency);

        File licensesDir = getLicensesDir(project);
        createFileIn(licensesDir, "groovy-all-LICENSE.txt", "");
        createFileIn(licensesDir, "groovy-all-NOTICE.txt", "");

        task.ignoreSha("groovy-all");
        task.checkDependencies();
    }

    @Test
    public void givenProjectWithoutLicensesDirWhenAskingForShaFilesThenShouldThrowException() {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("isn't a valid directory"));

        task.getShaFiles();
    }

    private Project createProject() {
        Project project = ProjectBuilder.builder().build();
        project.getPlugins().apply(JavaPlugin.class);

        return project;
    }

    private void createAllDefaultDependencyFiles(File licensesDir, String dependencyName) throws IOException, NoSuchAlgorithmException {
        createFileIn(licensesDir, dependencyName + "-LICENSE.txt", "");
        createFileIn(licensesDir, dependencyName + "-NOTICE.txt", "");

        updateShas.updateShas();
    }

    private File getLicensesDir(Project project) {
        return getFile(project, "licenses");
    }

    private File getFile(Project project, String fileName) {
        return project.getProjectDir().toPath().resolve(fileName).toFile();
    }

    private void createFileIn(File parent, String name, String content) throws IOException {
        parent.mkdir();

        Path file = parent.toPath().resolve(name);
        file.toFile().createNewFile();

        Files.write(file, content.getBytes(StandardCharsets.UTF_8));
    }

    private UpdateShasTask createUpdateShasTask(Project project, DependencyLicensesTask dependencyLicensesTask) {
        UpdateShasTask task = project.getTasks()
            .register("updateShas", UpdateShasTask.class)
            .get();

        task.setParentTask(dependencyLicensesTask);
        return task;
    }

    private DependencyLicensesTask createDependencyLicensesTask(Project project) {
        DependencyLicensesTask task = project.getTasks()
            .register("dependencyLicenses", DependencyLicensesTask.class)
            .get();

        task.setDependencies(getDependencies(project));
        return task;
    }

    private FileCollection getDependencies(Project project) {
        return project.getConfigurations().getByName("compile");
    }
}
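The helper methods at the bottom of this test show the intended wiring of the two tasks; condensed into one sketch (task names match the tests, the compile configuration is the one the tests use):

    // register the license check, point it at the compile configuration,
    // and let updateShas regenerate .sha1 files for the same dependency set
    DependencyLicensesTask licenses = project.getTasks()
        .register("dependencyLicenses", DependencyLicensesTask.class)
        .get();
    licenses.setDependencies(project.getConfigurations().getByName("compile"));

    UpdateShasTask updateShas = project.getTasks()
        .register("updateShas", UpdateShasTask.class)
        .get();
    updateShas.setParentTask(licenses);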
@@ -0,0 +1,140 @@
package org.elasticsearch.gradle.precommit;

import org.apache.commons.io.FileUtils;
import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.security.NoSuchAlgorithmException;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;

public class UpdateShasTaskTests extends GradleUnitTestCase {

    @Rule
    public ExpectedException expectedException = ExpectedException.none();

    private UpdateShasTask task;

    private Project project;

    private Dependency dependency;

    @Before
    public void prepare() throws IOException {
        project = createProject();
        task = createUpdateShasTask(project);
        dependency = project.getDependencies().localGroovy();
    }

    @Test
    public void whenDependencyDoesntExistThenShouldDeleteDependencySha()
        throws IOException, NoSuchAlgorithmException {

        File unusedSha = createFileIn(getLicensesDir(project), "test.sha1", "");
        task.updateShas();

        assertFalse(unusedSha.exists());
    }

    @Test
    public void whenDependencyExistsButShaNotThenShouldCreateNewShaFile()
        throws IOException, NoSuchAlgorithmException {
        project.getDependencies().add("compile", dependency);

        getLicensesDir(project).mkdir();
        task.updateShas();

        Path groovySha = Files
            .list(getLicensesDir(project).toPath())
            .findFirst().get();

        assertTrue(groovySha.toFile().getName().startsWith("groovy-all"));
    }

    @Test
    public void whenDependencyAndWrongShaExistsThenShouldNotOverwriteShaFile()
        throws IOException, NoSuchAlgorithmException {
        project.getDependencies().add("compile", dependency);

        File groovyJar = task.getParentTask().getDependencies().getFiles().iterator().next();
        String groovyShaName = groovyJar.getName() + ".sha1";

        File groovySha = createFileIn(getLicensesDir(project), groovyShaName, "content");
        task.updateShas();

        assertThat(FileUtils.readFileToString(groovySha), equalTo("content"));
    }

    @Test
    public void whenLicensesDirDoesntExistThenShouldThrowException()
        throws IOException, NoSuchAlgorithmException {
        expectedException.expect(GradleException.class);
        expectedException.expectMessage(containsString("isn't a valid directory"));

        task.updateShas();
    }

    private Project createProject() {
        Project project = ProjectBuilder.builder().build();
        project.getPlugins().apply(JavaPlugin.class);

        return project;
    }

    private File getLicensesDir(Project project) {
        return getFile(project, "licenses");
    }

    private File getFile(Project project, String fileName) {
        return project.getProjectDir().toPath().resolve(fileName).toFile();
    }

    private File createFileIn(File parent, String name, String content) throws IOException {
        parent.mkdir();

        Path path = parent.toPath().resolve(name);
        File file = path.toFile();

        Files.write(path, content.getBytes(), StandardOpenOption.CREATE);

        return file;
    }

    private UpdateShasTask createUpdateShasTask(Project project) {
        UpdateShasTask task = project.getTasks()
            .register("updateShas", UpdateShasTask.class)
            .get();

        task.setParentTask(createDependencyLicensesTask(project));
        return task;
    }

    private DependencyLicensesTask createDependencyLicensesTask(Project project) {
        DependencyLicensesTask task = project.getTasks()
            .register("dependencyLicenses", DependencyLicensesTask.class)
            .get();

        task.setDependencies(getDependencies(project));
        return task;
    }

    private FileCollection getDependencies(Project project) {
        return project.getConfigurations().getByName("compile");
    }
}
@@ -29,6 +29,7 @@ forbiddenApisTest.enabled = false
jarHell.enabled = false
// we don't have tests for now
test.enabled = false
thirdPartyAudit.enabled = false

task hello {
    doFirst {
@@ -34,12 +34,12 @@ test.enabled = false
dependencies {
    compile 'org.apache.commons:commons-math3:3.2'

    compile("org.elasticsearch.client:elasticsearch-rest-client:${version}")
    compile project(":client:rest")
    // bottleneck should be the client, not Elasticsearch
    compile project(path: ':client:client-benchmark-noop-api-plugin')
    // for transport client
    compile("org.elasticsearch:elasticsearch:${version}")
    compile("org.elasticsearch.client:transport:${version}")
    compile project(":server")
    compile project(":client:transport")
    compile project(path: ':modules:transport-netty4', configuration: 'runtime')
    compile project(path: ':modules:reindex', configuration: 'runtime')
    compile project(path: ':modules:lang-mustache', configuration: 'runtime')
@@ -50,24 +50,24 @@ dependencies {
 * Everything in the "shadow" configuration is *not* copied into the
 * shadowJar.
 */
compile "org.elasticsearch:elasticsearch:${version}"
compile "org.elasticsearch.client:elasticsearch-rest-client:${version}"
compile "org.elasticsearch.plugin:parent-join-client:${version}"
compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
compile "org.elasticsearch.plugin:rank-eval-client:${version}"
compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
compile project(':server')
compile project(':client:rest')
compile project(':modules:parent-join')
compile project(':modules:aggs-matrix-stats')
compile project(':modules:rank-eval')
compile project(':modules:lang-mustache')

testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
testCompile project(':client:test')
testCompile project(':test:framework')
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
// this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs
testCompile "org.elasticsearch:rest-api-spec:${version}"
testCompile project(":rest-api-spec")
// Needed for serialization tests:
// (In order to serialize a server side class to a client side class or the other way around)
testCompile "org.elasticsearch.plugin:x-pack-core:${version}"
testCompile project(':x-pack:plugin:core')

restSpec "org.elasticsearch:rest-api-spec:${version}"
restSpec project(':rest-api-spec')
}

// we need to copy the yaml spec so we can check naming (see RestHighLevelClientTests#testApiNamingConventions)
@ -22,8 +22,6 @@ package org.elasticsearch.client;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
|
||||
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
|
||||
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
|
||||
|
@ -47,6 +45,8 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryReques
|
|||
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
|
||||
import org.elasticsearch.client.indices.AnalyzeRequest;
|
||||
import org.elasticsearch.client.indices.AnalyzeResponse;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
import org.elasticsearch.client.indices.CreateIndexResponse;
|
||||
import org.elasticsearch.client.indices.FreezeIndexRequest;
|
||||
|
|
|
@@ -26,7 +26,6 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;

@@ -41,6 +40,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
@@ -33,7 +33,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;

@@ -52,6 +51,7 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
@@ -585,8 +585,8 @@ final class RequestConverters {
        if (updateByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
            params.putParam("scroll", updateByQueryRequest.getScrollTime());
        }
        if (updateByQueryRequest.getSize() > 0) {
            params.putParam("size", Integer.toString(updateByQueryRequest.getSize()));
        if (updateByQueryRequest.getMaxDocs() > 0) {
            params.putParam("max_docs", Integer.toString(updateByQueryRequest.getMaxDocs()));
        }
        request.addParameters(params.asMap());
        request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE));

@@ -613,8 +613,8 @@ final class RequestConverters {
        if (deleteByQueryRequest.getScrollTime() != AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT) {
            params.putParam("scroll", deleteByQueryRequest.getScrollTime());
        }
        if (deleteByQueryRequest.getSize() > 0) {
            params.putParam("size", Integer.toString(deleteByQueryRequest.getSize()));
        if (deleteByQueryRequest.getMaxDocs() > 0) {
            params.putParam("max_docs", Integer.toString(deleteByQueryRequest.getMaxDocs()));
        }
        request.addParameters(params.asMap());
        request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
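On the request side, the getMaxDocs() values read above come from the corresponding setter; a usage sketch (the index name is an example, and setMaxDocs is assumed to be the replacement for the size-based limit):

    UpdateByQueryRequest request = new UpdateByQueryRequest("my-index"); // example index name
    request.setMaxDocs(100); // serialized as the max_docs request parameter above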
|
|
@ -101,6 +101,7 @@ public final class IndicesFollowStats {
|
|||
static final ParseField WRITE_BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("write_buffer_size_in_bytes");
|
||||
static final ParseField FOLLOWER_MAPPING_VERSION_FIELD = new ParseField("follower_mapping_version");
|
||||
static final ParseField FOLLOWER_SETTINGS_VERSION_FIELD = new ParseField("follower_settings_version");
|
||||
static final ParseField FOLLOWER_ALIASES_VERSION_FIELD = new ParseField("follower_aliases_version");
|
||||
static final ParseField TOTAL_READ_TIME_MILLIS_FIELD = new ParseField("total_read_time_millis");
|
||||
static final ParseField TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD = new ParseField("total_read_remote_exec_time_millis");
|
||||
static final ParseField SUCCESSFUL_READ_REQUESTS_FIELD = new ParseField("successful_read_requests");
|
||||
|
@ -117,41 +118,42 @@ public final class IndicesFollowStats {
|
|||
|
||||
@SuppressWarnings("unchecked")
|
||||
static final ConstructingObjectParser<ShardFollowStats, Void> PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"shard-follow-stats",
|
||||
true,
|
||||
args -> new ShardFollowStats(
|
||||
(String) args[0],
|
||||
(String) args[1],
|
||||
(String) args[2],
|
||||
(int) args[3],
|
||||
(long) args[4],
|
||||
(long) args[5],
|
||||
(long) args[6],
|
||||
(long) args[7],
|
||||
(long) args[8],
|
||||
(int) args[9],
|
||||
(int) args[10],
|
||||
(int) args[11],
|
||||
(long) args[12],
|
||||
(long) args[13],
|
||||
(long) args[14],
|
||||
(long) args[15],
|
||||
(long) args[16],
|
||||
(long) args[17],
|
||||
(long) args[18],
|
||||
(long) args[19],
|
||||
(long) args[20],
|
||||
(long) args[21],
|
||||
(long) args[22],
|
||||
(long) args[23],
|
||||
(long) args[24],
|
||||
(long) args[25],
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>>) args[26])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
|
||||
(ElasticsearchException) args[27]));
|
||||
new ConstructingObjectParser<>(
|
||||
"shard-follow-stats",
|
||||
true,
|
||||
args -> new ShardFollowStats(
|
||||
(String) args[0],
|
||||
(String) args[1],
|
||||
(String) args[2],
|
||||
(int) args[3],
|
||||
(long) args[4],
|
||||
(long) args[5],
|
||||
(long) args[6],
|
||||
(long) args[7],
|
||||
(long) args[8],
|
||||
(int) args[9],
|
||||
(int) args[10],
|
||||
(int) args[11],
|
||||
(long) args[12],
|
||||
(long) args[13],
|
||||
(long) args[14],
|
||||
(long) args[15],
|
||||
(long) args[16],
|
||||
(long) args[17],
|
||||
(long) args[18],
|
||||
(long) args[19],
|
||||
(long) args[20],
|
||||
(long) args[21],
|
||||
(long) args[22],
|
||||
(long) args[23],
|
||||
(long) args[24],
|
||||
(long) args[25],
|
||||
(long) args[26],
|
||||
new TreeMap<>(
|
||||
((List<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>>) args[27])
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))),
|
||||
(ElasticsearchException) args[28]));
|
||||
|
||||
static final ConstructingObjectParser<Map.Entry<Long, Tuple<Integer, ElasticsearchException>>, Void> READ_EXCEPTIONS_ENTRY_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
|
@ -175,6 +177,7 @@ public final class IndicesFollowStats {
|
|||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_SIZE_IN_BYTES_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAPPING_VERSION_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_SETTINGS_VERSION_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_ALIASES_VERSION_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_TIME_MILLIS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD);
|
||||
PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_READ_REQUESTS_FIELD);
|
||||
|
@ -220,6 +223,7 @@ public final class IndicesFollowStats {
|
|||
private final long writeBufferSizeInBytes;
|
||||
private final long followerMappingVersion;
|
||||
private final long followerSettingsVersion;
|
||||
private final long followerAliasesVersion;
|
||||
private final long totalReadTimeMillis;
|
||||
private final long totalReadRemoteExecTimeMillis;
|
||||
private final long successfulReadRequests;
|
||||
|
@ -249,6 +253,7 @@ public final class IndicesFollowStats {
|
|||
long writeBufferSizeInBytes,
|
||||
long followerMappingVersion,
|
||||
long followerSettingsVersion,
|
||||
long followerAliasesVersion,
|
||||
long totalReadTimeMillis,
|
||||
long totalReadRemoteExecTimeMillis,
|
||||
long successfulReadRequests,
|
||||
|
@ -277,6 +282,7 @@ public final class IndicesFollowStats {
|
|||
this.writeBufferSizeInBytes = writeBufferSizeInBytes;
|
||||
this.followerMappingVersion = followerMappingVersion;
|
||||
this.followerSettingsVersion = followerSettingsVersion;
|
||||
this.followerAliasesVersion = followerAliasesVersion;
|
||||
this.totalReadTimeMillis = totalReadTimeMillis;
|
||||
this.totalReadRemoteExecTimeMillis = totalReadRemoteExecTimeMillis;
|
||||
this.successfulReadRequests = successfulReadRequests;
|
||||
|
@ -352,6 +358,10 @@ public final class IndicesFollowStats {
|
|||
return followerSettingsVersion;
|
||||
}
|
||||
|
||||
public long getFollowerAliasesVersion() {
|
||||
return followerAliasesVersion;
|
||||
}
|
||||
|
||||
public long getTotalReadTimeMillis() {
|
||||
return totalReadTimeMillis;
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,343 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * A request to analyze text
 */
public class AnalyzeRequest implements Validatable, ToXContentObject {

    private String index;

    private String[] text;

    private String analyzer;

    private NameOrDefinition tokenizer;

    private final List<NameOrDefinition> tokenFilters = new ArrayList<>();

    private final List<NameOrDefinition> charFilters = new ArrayList<>();

    private String field;

    private boolean explain = false;

    private String[] attributes = Strings.EMPTY_ARRAY;

    private String normalizer;

    /**
     * Analyzes text using a global analyzer
     */
    public static AnalyzeRequest withGlobalAnalyzer(String analyzer, String... text) {
        return new AnalyzeRequest(null, analyzer, null, null, text);
    }

    /**
     * Analyzes text using a custom analyzer built from global components
     */
    public static CustomAnalyzerBuilder buildCustomAnalyzer(String tokenizer) {
        return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizer));
    }

    /**
     * Analyzes text using a custom analyzer built from global components
     */
    public static CustomAnalyzerBuilder buildCustomAnalyzer(Map<String, Object> tokenizerSettings) {
        return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizerSettings));
    }

    /**
     * Analyzes text using a custom analyzer built from components defined on an index
     */
    public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, String tokenizer) {
        return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizer));
    }

    /**
     * Analyzes text using a custom analyzer built from components defined on an index
     */
    public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, Map<String, Object> tokenizerSettings) {
        return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizerSettings));
    }

    /**
     * Analyzes text using a named analyzer on an index
     */
    public static AnalyzeRequest withIndexAnalyzer(String index, String analyzer, String... text) {
        return new AnalyzeRequest(index, analyzer, null, null, text);
    }

    /**
     * Analyzes text using the analyzer defined on a specific field within an index
     */
    public static AnalyzeRequest withField(String index, String field, String... text) {
        return new AnalyzeRequest(index, null, null, field, text);
    }

    /**
     * Analyzes text using a named normalizer on an index
     */
    public static AnalyzeRequest withNormalizer(String index, String normalizer, String... text) {
        return new AnalyzeRequest(index, null, normalizer, null, text);
    }

    /**
     * Analyzes text using a custom normalizer built from global components
     */
    public static CustomAnalyzerBuilder buildCustomNormalizer() {
        return new CustomAnalyzerBuilder(null, null);
    }

    /**
     * Analyzes text using a custom normalizer built from components defined on an index
     */
    public static CustomAnalyzerBuilder buildCustomNormalizer(String index) {
        return new CustomAnalyzerBuilder(index, null);
    }

    /**
     * Helper class to build custom analyzer definitions
     */
    public static class CustomAnalyzerBuilder {

        final NameOrDefinition tokenizer;
        final String index;
        List<NameOrDefinition> charFilters = new ArrayList<>();
        List<NameOrDefinition> tokenFilters = new ArrayList<>();

        CustomAnalyzerBuilder(String index, NameOrDefinition tokenizer) {
            this.tokenizer = tokenizer;
            this.index = index;
        }

        public CustomAnalyzerBuilder addCharFilter(String name) {
            charFilters.add(new NameOrDefinition(name));
            return this;
        }

        public CustomAnalyzerBuilder addCharFilter(Map<String, Object> settings) {
            charFilters.add(new NameOrDefinition(settings));
            return this;
        }

        public CustomAnalyzerBuilder addTokenFilter(String name) {
            tokenFilters.add(new NameOrDefinition(name));
            return this;
        }

        public CustomAnalyzerBuilder addTokenFilter(Map<String, Object> settings) {
            tokenFilters.add(new NameOrDefinition(settings));
            return this;
        }

        public AnalyzeRequest build(String... text) {
            return new AnalyzeRequest(index, tokenizer, charFilters, tokenFilters, text);
        }
    }

    private AnalyzeRequest(String index, String analyzer, String normalizer, String field, String... text) {
        this.index = index;
        this.analyzer = analyzer;
        this.normalizer = normalizer;
        this.field = field;
        this.text = text;
    }

    private AnalyzeRequest(String index, NameOrDefinition tokenizer, List<NameOrDefinition> charFilters,
                           List<NameOrDefinition> tokenFilters, String... text) {
        this.index = index;
        this.analyzer = null;
        this.normalizer = null;
        this.field = null;
        this.tokenizer = tokenizer;
        this.charFilters.addAll(charFilters);
        this.tokenFilters.addAll(tokenFilters);
        this.text = text;
    }

    static class NameOrDefinition implements ToXContentFragment {
        // exactly one of these two members is not null
        public final String name;
        public final Settings definition;

        NameOrDefinition(String name) {
            this.name = Objects.requireNonNull(name);
            this.definition = null;
        }

        NameOrDefinition(Settings settings) {
            this.name = null;
            this.definition = Objects.requireNonNull(settings);
        }

        NameOrDefinition(Map<String, ?> definition) {
            this.name = null;
            Objects.requireNonNull(definition);
            try {
                XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
                builder.map(definition);
                this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
            } catch (IOException e) {
                throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
            }
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (definition == null) {
                return builder.value(name);
            }
            builder.startObject();
            definition.toXContent(builder, params);
            builder.endObject();
            return builder;
        }

    }

    /**
     * Returns the index that the request should be executed against, or {@code null} if
     * no index is specified
     */
    public String index() {
        return this.index;
    }

    /**
     * Returns the text to be analyzed
     */
    public String[] text() {
        return this.text;
    }

    /**
     * Returns the named analyzer used for analysis, if defined
     */
    public String analyzer() {
        return this.analyzer;
    }

    /**
     * Returns the named normalizer used for analysis, if defined
     */
    public String normalizer() {
        return this.normalizer;
    }

    /**
     * Returns a custom Tokenizer used for analysis, if defined
     */
    public NameOrDefinition tokenizer() {
        return this.tokenizer;
    }

    /**
     * Returns the custom token filters used for analysis, if defined
     */
    public List<NameOrDefinition> tokenFilters() {
        return this.tokenFilters;
    }

    /**
     * Returns the custom character filters used for analysis, if defined
     */
    public List<NameOrDefinition> charFilters() {
        return this.charFilters;
    }

    /**
     * Returns the field to take an Analyzer from, if defined
     */
    public String field() {
        return this.field;
    }

    /**
     * Set whether or not detailed explanations of analysis should be returned
     */
    public AnalyzeRequest explain(boolean explain) {
        this.explain = explain;
        return this;
    }

    public boolean explain() {
        return this.explain;
    }

    public AnalyzeRequest attributes(String... attributes) {
        if (attributes == null) {
            throw new IllegalArgumentException("attributes must not be null");
        }
        this.attributes = attributes;
        return this;
    }

    public String[] attributes() {
        return this.attributes;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field("text", text);
        if (Strings.isNullOrEmpty(analyzer) == false) {
            builder.field("analyzer", analyzer);
        }
        if (tokenizer != null) {
            builder.field("tokenizer", tokenizer);
        }
        if (tokenFilters.size() > 0) {
            builder.field("filter", tokenFilters);
        }
        if (charFilters.size() > 0) {
            builder.field("char_filter", charFilters);
        }
        if (Strings.isNullOrEmpty(field) == false) {
            builder.field("field", field);
        }
        if (explain) {
            builder.field("explain", true);
        }
        if (attributes.length > 0) {
            builder.field("attributes", attributes);
        }
        if (Strings.isNullOrEmpty(normalizer) == false) {
            builder.field("normalizer", normalizer);
        }
        return builder.endObject();
    }
}
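A usage sketch built from the factory methods above; the analyzer, filter, field, and index names are common examples, not values from this commit:

    AnalyzeRequest simple = AnalyzeRequest.withGlobalAnalyzer("standard", "Some text to analyze");

    AnalyzeRequest custom = AnalyzeRequest.buildCustomAnalyzer("whitespace")
        .addCharFilter("html_strip")
        .addTokenFilter("lowercase")
        .build("Some text to analyze");

    AnalyzeRequest byField = AnalyzeRequest.withField("my-index", "title", "Some text to analyze");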
@ -0,0 +1,183 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.indices;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.ObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class AnalyzeResponse {
|
||||
|
||||
private static final String TOKENS = "tokens";
|
||||
private static final String DETAIL = "detail";
|
||||
|
||||
    public static class AnalyzeToken {
        private String term;
        private int startOffset;
        private int endOffset;
        private int position;
        private int positionLength = 1;
        private String type;
        private final Map<String, Object> attributes = new HashMap<>();

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            AnalyzeResponse.AnalyzeToken that = (AnalyzeResponse.AnalyzeToken) o;
            return startOffset == that.startOffset &&
                endOffset == that.endOffset &&
                position == that.position &&
                positionLength == that.positionLength &&
                Objects.equals(term, that.term) &&
                Objects.equals(attributes, that.attributes) &&
                Objects.equals(type, that.type);
        }

        @Override
        public int hashCode() {
            return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type);
        }

        public String getTerm() {
            return this.term;
        }

        private void setTerm(String term) {
            this.term = term;
        }

        public int getStartOffset() {
            return this.startOffset;
        }

        private void setStartOffset(int startOffset) {
            this.startOffset = startOffset;
        }

        public int getEndOffset() {
            return this.endOffset;
        }

        private void setEndOffset(int endOffset) {
            this.endOffset = endOffset;
        }

        public int getPosition() {
            return this.position;
        }

        private void setPosition(int position) {
            this.position = position;
        }

        public int getPositionLength() {
            return this.positionLength;
        }

        private void setPositionLength(int positionLength) {
            this.positionLength = positionLength;
        }

        public String getType() {
            return this.type;
        }

        private void setType(String type) {
            this.type = type;
        }

        public Map<String, Object> getAttributes() {
            return this.attributes;
        }

        private void setAttribute(String key, Object value) {
            this.attributes.put(key, value);
        }

        private static final ObjectParser<AnalyzeToken, Void> PARSER
            = new ObjectParser<>("analyze_token", AnalyzeToken::setAttribute, AnalyzeToken::new);
        static {
            PARSER.declareString(AnalyzeToken::setTerm, new ParseField("token"));
            PARSER.declareString(AnalyzeToken::setType, new ParseField("type"));
            PARSER.declareInt(AnalyzeToken::setPosition, new ParseField("position"));
            PARSER.declareInt(AnalyzeToken::setStartOffset, new ParseField("start_offset"));
            PARSER.declareInt(AnalyzeToken::setEndOffset, new ParseField("end_offset"));
            PARSER.declareInt(AnalyzeToken::setPositionLength, new ParseField("positionLength"));
        }

        public static AnalyzeToken fromXContent(XContentParser parser) throws IOException {
            return PARSER.parse(parser, null);
        }
    }

    private final DetailAnalyzeResponse detail;
    private final List<AnalyzeResponse.AnalyzeToken> tokens;

    private AnalyzeResponse(List<AnalyzeResponse.AnalyzeToken> tokens, DetailAnalyzeResponse detail) {
        this.tokens = tokens;
        this.detail = detail;
    }

    public List<AnalyzeResponse.AnalyzeToken> getTokens() {
        return this.tokens;
    }

    public DetailAnalyzeResponse detail() {
        return this.detail;
    }

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("analyze_response",
        true, args -> new AnalyzeResponse((List<AnalyzeResponse.AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1]));

    static {
        PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeToken.PARSER, new ParseField(TOKENS));
        PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(DETAIL));
    }

    public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AnalyzeResponse that = (AnalyzeResponse) o;
        return Objects.equals(detail, that.detail) &&
            Objects.equals(tokens, that.tokens);
    }

    @Override
    public int hashCode() {
        return Objects.hash(detail, tokens);
    }

}
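For context, a minimal sketch of driving this parser from a raw JSON body; the sample document and the NamedXContentRegistry.EMPTY / THROW_UNSUPPORTED_OPERATION wiring are illustrative assumptions, not part of the change itself:

    // Parse a hand-written _analyze JSON body into the client-side response.
    String json = "{\"tokens\":[{\"token\":\"one\",\"start_offset\":0,"
        + "\"end_offset\":3,\"type\":\"<ALPHANUM>\",\"position\":0}]}";
    try (XContentParser parser = XContentType.JSON.xContent().createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        AnalyzeResponse response = AnalyzeResponse.fromXContent(parser);
        // one token, "one", spanning offsets [0, 3), with the default positionLength of 1
        assertEquals("one", response.getTokens().get(0).getTerm());
    }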
@@ -0,0 +1,214 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

public class DetailAnalyzeResponse {

    private final boolean customAnalyzer;
    private final AnalyzeTokenList analyzer;
    private final CharFilteredText[] charfilters;
    private final AnalyzeTokenList tokenizer;
    private final AnalyzeTokenList[] tokenfilters;

    private DetailAnalyzeResponse(boolean customAnalyzer,
                                  AnalyzeTokenList analyzer,
                                  List<CharFilteredText> charfilters,
                                  AnalyzeTokenList tokenizer,
                                  List<AnalyzeTokenList> tokenfilters) {
        this.customAnalyzer = customAnalyzer;
        this.analyzer = analyzer;
        this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[]{});
        this.tokenizer = tokenizer;
        this.tokenfilters = tokenfilters == null ? null : tokenfilters.toArray(new AnalyzeTokenList[]{});
    }

    public AnalyzeTokenList analyzer() {
        return this.analyzer;
    }

    public CharFilteredText[] charfilters() {
        return this.charfilters;
    }

    public AnalyzeTokenList tokenizer() {
        return tokenizer;
    }

    public AnalyzeTokenList[] tokenfilters() {
        return tokenfilters;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
        return customAnalyzer == that.customAnalyzer &&
            Objects.equals(analyzer, that.analyzer) &&
            Arrays.equals(charfilters, that.charfilters) &&
            Objects.equals(tokenizer, that.tokenizer) &&
            Arrays.equals(tokenfilters, that.tokenfilters);
    }

    @Override
    public int hashCode() {
        int result = Objects.hash(customAnalyzer, analyzer, tokenizer);
        result = 31 * result + Arrays.hashCode(charfilters);
        result = 31 * result + Arrays.hashCode(tokenfilters);
        return result;
    }

    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
        true, args -> new DetailAnalyzeResponse(
            (boolean) args[0],
            (AnalyzeTokenList) args[1],
            (List<CharFilteredText>)args[2],
            (AnalyzeTokenList) args[3],
            (List<AnalyzeTokenList>)args[4]));

    static {
        PARSER.declareBoolean(constructorArg(), new ParseField("custom_analyzer"));
        PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("analyzer"));
        PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField("charfilters"));
        PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenizer"));
        PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenfilters"));
    }

    public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    public static class AnalyzeTokenList {
        private final String name;
        private final AnalyzeResponse.AnalyzeToken[] tokens;

        private static final String TOKENS = "tokens";

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            AnalyzeTokenList that = (AnalyzeTokenList) o;
            return Objects.equals(name, that.name) &&
                Arrays.equals(tokens, that.tokens);
        }

        @Override
        public int hashCode() {
            int result = Objects.hash(name);
            result = 31 * result + Arrays.hashCode(tokens);
            return result;
        }

        public AnalyzeTokenList(String name, List<AnalyzeResponse.AnalyzeToken> tokens) {
            this.name = name;
            this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[]{});
        }

        public String getName() {
            return name;
        }

        public AnalyzeResponse.AnalyzeToken[] getTokens() {
            return tokens;
        }

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
            true, args -> new AnalyzeTokenList((String) args[0],
                (List<AnalyzeResponse.AnalyzeToken>)args[1]));

        static {
            PARSER.declareString(constructorArg(), new ParseField("name"));
            PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
                new ParseField("tokens"));
        }

        public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
            return PARSER.parse(parser, null);
        }

    }

    public static class CharFilteredText {
        private final String name;
        private final String[] texts;

        CharFilteredText(String name, String[] texts) {
            this.name = name;
            if (texts != null) {
                this.texts = texts;
            } else {
                this.texts = Strings.EMPTY_ARRAY;
            }
        }

        public String getName() {
            return name;
        }

        public String[] getTexts() {
            return texts;
        }

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
            true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));

        static {
            PARSER.declareString(constructorArg(), new ParseField("name"));
            PARSER.declareStringArray(constructorArg(), new ParseField("filtered_text"));
        }

        public static CharFilteredText fromXContent(XContentParser parser) throws IOException {
            return PARSER.parse(parser, null);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            CharFilteredText that = (CharFilteredText) o;
            return Objects.equals(name, that.name) &&
                Arrays.equals(texts, that.texts);
        }

        @Override
        public int hashCode() {
            int result = Objects.hash(name);
            result = 31 * result + Arrays.hashCode(texts);
            return result;
        }
    }
}
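A short sketch of walking the detail tree this class models; it uses only the accessors defined above and assumes the response came from an analyze call with explain(true) set:

    // Summarize the explain output of an analyze call (sketch, not part of the diff).
    static void printDetail(AnalyzeResponse response) {
        DetailAnalyzeResponse detail = response.detail(); // null unless explain(true) was requested
        if (detail == null) {
            return;
        }
        if (detail.tokenizer() != null) {
            System.out.println("tokenizer: " + detail.tokenizer().getName());
        }
        if (detail.tokenfilters() != null) {
            for (DetailAnalyzeResponse.AnalyzeTokenList filter : detail.tokenfilters()) {
                System.out.println(filter.getName() + ": " + filter.getTokens().length + " tokens");
            }
        }
    }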
@@ -37,6 +37,7 @@ import java.util.Optional;
public class FindFileStructureRequest implements Validatable, ToXContentFragment {

    public static final ParseField LINES_TO_SAMPLE = new ParseField("lines_to_sample");
    public static final ParseField LINE_MERGE_SIZE_LIMIT = new ParseField("line_merge_size_limit");
    public static final ParseField TIMEOUT = new ParseField("timeout");
    public static final ParseField CHARSET = FileStructure.CHARSET;
    public static final ParseField FORMAT = FileStructure.FORMAT;

@@ -52,6 +53,7 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment
    public static final ParseField EXPLAIN = new ParseField("explain");

    private Integer linesToSample;
    private Integer lineMergeSizeLimit;
    private TimeValue timeout;
    private String charset;
    private FileStructure.Format format;

@@ -77,6 +79,14 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment
        this.linesToSample = linesToSample;
    }

    public Integer getLineMergeSizeLimit() {
        return lineMergeSizeLimit;
    }

    public void setLineMergeSizeLimit(Integer lineMergeSizeLimit) {
        this.lineMergeSizeLimit = lineMergeSizeLimit;
    }

    public TimeValue getTimeout() {
        return timeout;
    }

@@ -228,6 +238,9 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment
        if (linesToSample != null) {
            builder.field(LINES_TO_SAMPLE.getPreferredName(), linesToSample);
        }
        if (lineMergeSizeLimit != null) {
            builder.field(LINE_MERGE_SIZE_LIMIT.getPreferredName(), lineMergeSizeLimit);
        }
        if (timeout != null) {
            builder.field(TIMEOUT.getPreferredName(), timeout);
        }

@@ -270,8 +283,8 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment

    @Override
    public int hashCode() {
        return Objects.hash(linesToSample, timeout, charset, format, columnNames, hasHeaderRow, delimiter, grokPattern, timestampFormat,
            timestampField, explain, sample);
        return Objects.hash(linesToSample, lineMergeSizeLimit, timeout, charset, format, columnNames, hasHeaderRow, delimiter, grokPattern,
            timestampFormat, timestampField, explain, sample);
    }

    @Override

@@ -287,6 +300,7 @@ public class FindFileStructureRequest implements Validatable, ToXContentFragment

        FindFileStructureRequest that = (FindFileStructureRequest) other;
        return Objects.equals(this.linesToSample, that.linesToSample) &&
            Objects.equals(this.lineMergeSizeLimit, that.lineMergeSizeLimit) &&
            Objects.equals(this.timeout, that.timeout) &&
            Objects.equals(this.charset, that.charset) &&
            Objects.equals(this.format, that.format) &&
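A hedged usage sketch of the new knob added above, alongside the existing lines_to_sample setting; the concrete values are arbitrary, and the limit reads as a cap on how large a merged multi-line message may grow during sampling (inferred from the field name, not stated in the diff):

    FindFileStructureRequest request = new FindFileStructureRequest();
    request.setLinesToSample(500);          // existing option
    request.setLineMergeSizeLimit(20000);   // new option introduced by this change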
@@ -38,12 +38,14 @@ public class FieldStats implements ToXContentObject {
    public static final ParseField MAX_VALUE = new ParseField("max_value");
    public static final ParseField MEAN_VALUE = new ParseField("mean_value");
    public static final ParseField MEDIAN_VALUE = new ParseField("median_value");
    public static final ParseField EARLIEST = new ParseField("earliest");
    public static final ParseField LATEST = new ParseField("latest");
    public static final ParseField TOP_HITS = new ParseField("top_hits");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<FieldStats, Void> PARSER = new ConstructingObjectParser<>("field_stats", true,
        a -> new FieldStats((long) a[0], (int) a[1], (Double) a[2], (Double) a[3], (Double) a[4], (Double) a[5],
            (List<Map<String, Object>>) a[6]));
            (String) a[6], (String) a[7], (List<Map<String, Object>>) a[8]));

    static {
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);

@@ -52,6 +54,8 @@ public class FieldStats implements ToXContentObject {
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), MAX_VALUE);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), MEAN_VALUE);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), MEDIAN_VALUE);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EARLIEST);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LATEST);
        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.mapOrdered(), TOP_HITS);
    }

@@ -61,16 +65,20 @@ public class FieldStats implements ToXContentObject {
    private final Double maxValue;
    private final Double meanValue;
    private final Double medianValue;
    private final String earliestTimestamp;
    private final String latestTimestamp;
    private final List<Map<String, Object>> topHits;

    FieldStats(long count, int cardinality, Double minValue, Double maxValue, Double meanValue, Double medianValue,
        List<Map<String, Object>> topHits) {
        String earliestTimestamp, String latestTimestamp, List<Map<String, Object>> topHits) {
        this.count = count;
        this.cardinality = cardinality;
        this.minValue = minValue;
        this.maxValue = maxValue;
        this.meanValue = meanValue;
        this.medianValue = medianValue;
        this.earliestTimestamp = earliestTimestamp;
        this.latestTimestamp = latestTimestamp;
        this.topHits = (topHits == null) ? Collections.emptyList() : Collections.unmodifiableList(topHits);
    }

@@ -98,6 +106,14 @@ public class FieldStats implements ToXContentObject {
        return medianValue;
    }

    public String getEarliestTimestamp() {
        return earliestTimestamp;
    }

    public String getLatestTimestamp() {
        return latestTimestamp;
    }

    public List<Map<String, Object>> getTopHits() {
        return topHits;
    }

@@ -120,6 +136,12 @@ public class FieldStats implements ToXContentObject {
        if (medianValue != null) {
            builder.field(MEDIAN_VALUE.getPreferredName(), toIntegerIfInteger(medianValue));
        }
        if (earliestTimestamp != null) {
            builder.field(EARLIEST.getPreferredName(), earliestTimestamp);
        }
        if (latestTimestamp != null) {
            builder.field(LATEST.getPreferredName(), latestTimestamp);
        }
        if (topHits.isEmpty() == false) {
            builder.field(TOP_HITS.getPreferredName(), topHits);
        }

@@ -140,7 +162,7 @@ public class FieldStats implements ToXContentObject {
    @Override
    public int hashCode() {
        return Objects.hash(count, cardinality, minValue, maxValue, meanValue, medianValue, topHits);
        return Objects.hash(count, cardinality, minValue, maxValue, meanValue, medianValue, earliestTimestamp, latestTimestamp, topHits);
    }

    @Override

@@ -161,6 +183,8 @@ public class FieldStats implements ToXContentObject {
            Objects.equals(this.maxValue, that.maxValue) &&
            Objects.equals(this.meanValue, that.meanValue) &&
            Objects.equals(this.medianValue, that.medianValue) &&
            Objects.equals(this.earliestTimestamp, that.earliestTimestamp) &&
            Objects.equals(this.latestTimestamp, that.latestTimestamp) &&
            Objects.equals(this.topHits, that.topHits);
    }
}
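A small sketch reading the two new accessors; both are declared as optional constructor args above, so null checks are needed (presumably they are only populated for date-like fields, though the diff does not say so explicitly):

    static void printDateRange(FieldStats stats) {
        if (stats.getEarliestTimestamp() != null && stats.getLatestTimestamp() != null) {
            System.out.println(stats.getEarliestTimestamp() + " .. " + stats.getLatestTimestamp());
        }
    }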
@@ -318,11 +318,6 @@ public class XPackInfoResponse {
            return name;
        }

        @Nullable
        public String description() {
            return description;
        }

        public boolean available() {
            return available;
        }
@@ -291,7 +291,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
        assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), testDocs);
    }

    @SuppressWarnings("unchecked")
    public void testGlobalParametersAndSingleRequest() throws Exception {
        createIndexWithMultipleShards("test");

@@ -326,7 +325,6 @@ public class BulkProcessorIT extends ESRestHighLevelClientTestCase {
        assertThat(blogs, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
    }

    @SuppressWarnings("unchecked")
    public void testGlobalParametersAndBulkProcessor() throws Exception {
        createIndexWithMultipleShards("test");

@@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.nullValue;

public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTestCase {

    @SuppressWarnings("unchecked")
    public void testGlobalPipelineOnBulkRequest() throws IOException {
        createFieldAddingPipleine("xyz", "fieldNameXYZ", "valueXYZ");

@@ -83,7 +82,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
        assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldXYZ"), nullValue())));
    }

    @SuppressWarnings("unchecked")
    public void testMixPipelineOnRequestAndGlobal() throws IOException {
        createFieldAddingPipleine("globalId", "fieldXYZ", "valueXYZ");
        createFieldAddingPipleine("perIndexId", "someNewField", "someValue");

@@ -153,7 +151,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
        assertThat(hits, everyItem(hasType("global_type")));
    }

    @SuppressWarnings("unchecked")
    public void testTypeGlobalAndPerRequest() throws IOException {
        BulkRequest request = new BulkRequest(null, "global_type");
        request.add(new IndexRequest("index1", "local_type", "1")

@@ -171,7 +168,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
            .and(hasType("global_type"))));
    }

    @SuppressWarnings("unchecked")
    public void testGlobalRouting() throws IOException {
        createIndexWithMultipleShards("index");
        BulkRequest request = new BulkRequest(null);

@@ -189,7 +185,6 @@ public class BulkRequestWithGlobalParametersIT extends ESRestHighLevelClientTest
        assertThat(hits, containsInAnyOrder(hasId("1"), hasId("2")));
    }

    @SuppressWarnings("unchecked")
    public void testMixLocalAndGlobalRouting() throws IOException {
        BulkRequest request = new BulkRequest(null);
        request.routing("globalRouting");
@@ -140,7 +140,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
    }

    @After
    public void cleanUpTransforms() throws IOException {
    public void cleanUpTransforms() throws Exception {
        for (String transformId : transformsToClean) {
            highLevelClient().dataFrame().stopDataFrameTransform(
                new StopDataFrameTransformRequest(transformId, Boolean.TRUE, null), RequestOptions.DEFAULT);

@@ -152,6 +152,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
        }

        transformsToClean = new ArrayList<>();
        waitForPendingTasks(adminClient());
    }

    public void testCreateDelete() throws IOException {
@@ -28,8 +28,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;

@@ -58,6 +56,8 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;

@@ -1852,12 +1852,12 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {

        RestHighLevelClient client = highLevelClient();

        AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
        AnalyzeRequest noindexRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three");
        AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);

        assertThat(noindexResponse.getTokens(), hasSize(3));

        AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
        AnalyzeRequest detailsRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three").explain(true);
        AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);

        assertNotNull(detailsResponse.detail());
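A minimal synchronous sketch of the factory-method API the test above migrates to, using only calls visible in this diff plus RequestOptions.DEFAULT; the client variable is assumed to be a configured RestHighLevelClient:

    AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", "One two three");
    AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
    for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
        System.out.println(token.getTerm() + " @ position " + token.getPosition());
    }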
@@ -29,7 +29,6 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;

@@ -45,6 +44,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
import org.elasticsearch.client.indices.GetIndexRequest;

@@ -86,18 +86,14 @@ import static org.hamcrest.Matchers.nullValue;
public class IndicesRequestConvertersTests extends ESTestCase {

    public void testAnalyzeRequest() throws Exception {
        AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
            .text("Here is some text")
            .index("test_index")
            .analyzer("test_analyzer");
        AnalyzeRequest indexAnalyzeRequest
            = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text");

        Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest);
        assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
        RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity());

        AnalyzeRequest analyzeRequest = new AnalyzeRequest()
            .text("more text")
            .analyzer("test_analyzer");
        AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text");
        assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
    }
@@ -71,17 +71,14 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
        assertEquals(LicenseStatus.ACTIVE, info.getLicenseInfo().getStatus());

        FeatureSet graph = info.getFeatureSetsInfo().getFeatureSets().get("graph");
        assertNotNull(graph.description());
        assertTrue(graph.available());
        assertTrue(graph.enabled());
        assertNull(graph.nativeCodeInfo());
        FeatureSet monitoring = info.getFeatureSetsInfo().getFeatureSets().get("monitoring");
        assertNotNull(monitoring.description());
        assertTrue(monitoring.available());
        assertTrue(monitoring.enabled());
        assertNull(monitoring.nativeCodeInfo());
        FeatureSet ml = info.getFeatureSetsInfo().getFeatureSets().get("ml");
        assertNotNull(ml.description());
        assertTrue(ml.available());
        assertTrue(ml.enabled());
        assertEquals(mainResponse.getVersion().getNumber(), ml.nativeCodeInfo().get("version").toString());
@@ -32,7 +32,6 @@ import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;

@@ -57,6 +56,7 @@ import org.elasticsearch.client.RequestConverters.EndpointBuilder;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;

@@ -436,7 +436,11 @@ public class RequestConvertersTests extends ESTestCase {
            reindexRequest.setDestRouting("=cat");
        }
        if (randomBoolean()) {
            reindexRequest.setSize(randomIntBetween(100, 1000));
            if (randomBoolean()) {
                reindexRequest.setMaxDocs(randomIntBetween(100, 1000));
            } else {
                reindexRequest.setSize(randomIntBetween(100, 1000));
            }
        }
        if (randomBoolean()) {
            reindexRequest.setAbortOnVersionConflict(false);

@@ -488,8 +492,12 @@ public class RequestConvertersTests extends ESTestCase {
        }
        if (randomBoolean()) {
            int size = randomIntBetween(100, 1000);
            updateByQueryRequest.setSize(size);
            expectedParams.put("size", Integer.toString(size));
            if (randomBoolean()) {
                updateByQueryRequest.setMaxDocs(size);
            } else {
                updateByQueryRequest.setSize(size);
            }
            expectedParams.put("max_docs", Integer.toString(size));
        }
        if (randomBoolean()) {
            updateByQueryRequest.setAbortOnVersionConflict(false);

@@ -538,8 +546,12 @@ public class RequestConvertersTests extends ESTestCase {
        }
        if (randomBoolean()) {
            int size = randomIntBetween(100, 1000);
            deleteByQueryRequest.setSize(size);
            expectedParams.put("size", Integer.toString(size));
            if (randomBoolean()) {
                deleteByQueryRequest.setMaxDocs(size);
            } else {
                deleteByQueryRequest.setSize(size);
            }
            expectedParams.put("max_docs", Integer.toString(size));
        }
        if (randomBoolean()) {
            deleteByQueryRequest.setAbortOnVersionConflict(false);

@@ -1643,18 +1655,14 @@ public class RequestConvertersTests extends ESTestCase {
    }

    public void testAnalyzeRequest() throws Exception {
        AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
            .text("Here is some text")
            .index("test_index")
            .analyzer("test_analyzer");
        AnalyzeRequest indexAnalyzeRequest
            = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text");

        Request request = RequestConverters.analyze(indexAnalyzeRequest);
        assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
        assertToXContentBody(indexAnalyzeRequest, request.getEntity());

        AnalyzeRequest analyzeRequest = new AnalyzeRequest()
            .text("more text")
            .analyzer("test_analyzer");
        AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text");
        assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
    }
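The three hunks above all exercise the same rename: setSize on reindex, update-by-query and delete-by-query gains a setMaxDocs counterpart, and the tests expect either spelling to surface as the max_docs parameter. A brief sketch, with index names assumed for illustration:

    ReindexRequest reindex = new ReindexRequest();
    reindex.setSourceIndices("source");   // assumed index names
    reindex.setDestIndex("dest");
    reindex.setMaxDocs(500);              // preferred; setSize(500) is the older equivalent, per the tests above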
|
@ -710,8 +710,8 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
"indices.exists_type",
|
||||
"indices.get_upgrade",
|
||||
"indices.put_alias",
|
||||
"render_search_template",
|
||||
"scripts_painless_execute"
|
||||
"scripts_painless_execute",
|
||||
"render_search_template"
|
||||
};
|
||||
//These API are not required for high-level client feature completeness
|
||||
String[] notRequiredApi = new String[] {
|
||||
|
@ -731,7 +731,6 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
"nodes.hot_threads",
|
||||
"nodes.usage",
|
||||
"nodes.reload_secure_settings",
|
||||
"scripts_painless_context",
|
||||
"search_shards",
|
||||
};
|
||||
List<String> booleanReturnMethods = Arrays.asList(
|
||||
|
|
|
@@ -41,9 +41,13 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.RestoreInfo;
import org.elasticsearch.snapshots.SnapshotInfo;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.contains;

@@ -139,6 +143,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
        CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot);
        boolean waitForCompletion = randomBoolean();
        request.waitForCompletion(waitForCompletion);
        if (randomBoolean()) {
            request.userMetadata(randomUserMetadata());
        }
        request.partial(randomBoolean());
        request.includeGlobalState(randomBoolean());

@@ -167,6 +174,8 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
        CreateSnapshotResponse putSnapshotResponse1 = createTestSnapshot(createSnapshotRequest1);
        CreateSnapshotRequest createSnapshotRequest2 = new CreateSnapshotRequest(repository, snapshot2);
        createSnapshotRequest2.waitForCompletion(true);
        Map<String, Object> originalMetadata = randomUserMetadata();
        createSnapshotRequest2.userMetadata(originalMetadata);
        CreateSnapshotResponse putSnapshotResponse2 = createTestSnapshot(createSnapshotRequest2);
        // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
        assertEquals(RestStatus.OK, putSnapshotResponse1.status());

@@ -186,6 +195,15 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
        assertEquals(2, response.getSnapshots().size());
        assertThat(response.getSnapshots().stream().map((s) -> s.snapshotId().getName()).collect(Collectors.toList()),
            contains("test_snapshot1", "test_snapshot2"));
        Optional<Map<String, Object>> returnedMetadata = response.getSnapshots().stream()
            .filter(s -> s.snapshotId().getName().equals("test_snapshot2"))
            .findFirst()
            .map(SnapshotInfo::userMetadata);
        if (returnedMetadata.isPresent()) {
            assertEquals(originalMetadata, returnedMetadata.get());
        } else {
            assertNull("retrieved metadata is null, expected non-null metadata", originalMetadata);
        }
    }

    public void testSnapshotsStatus() throws IOException {

@@ -231,6 +249,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot);
        createSnapshotRequest.indices(testIndex);
        createSnapshotRequest.waitForCompletion(true);
        if (randomBoolean()) {
            createSnapshotRequest.userMetadata(randomUserMetadata());
        }
        CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
        assertEquals(RestStatus.OK, createSnapshotResponse.status());

@@ -261,6 +282,9 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {

        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
        createSnapshotRequest.waitForCompletion(true);
        if (randomBoolean()) {
            createSnapshotRequest.userMetadata(randomUserMetadata());
        }
        CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
        // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
        assertEquals(RestStatus.OK, createSnapshotResponse.status());

@@ -270,4 +294,28 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {

        assertTrue(response.isAcknowledged());
    }

    private static Map<String, Object> randomUserMetadata() {
        if (randomBoolean()) {
            return null;
        }

        Map<String, Object> metadata = new HashMap<>();
        long fields = randomLongBetween(0, 4);
        for (int i = 0; i < fields; i++) {
            if (randomBoolean()) {
                metadata.put(randomValueOtherThanMany(metadata::containsKey, () -> randomAlphaOfLengthBetween(2,10)),
                    randomAlphaOfLengthBetween(5, 5));
            } else {
                Map<String, Object> nested = new HashMap<>();
                long nestedFields = randomLongBetween(0, 4);
                for (int j = 0; j < nestedFields; j++) {
                    nested.put(randomValueOtherThanMany(nested::containsKey, () -> randomAlphaOfLengthBetween(2,10)),
                        randomAlphaOfLengthBetween(5, 5));
                }
                metadata.put(randomValueOtherThanMany(metadata::containsKey, () -> randomAlphaOfLengthBetween(2,10)), nested);
            }
        }
        return metadata;
    }
}
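A sketch of attaching user metadata to a snapshot, as these tests do; the repository/snapshot names and the metadata payload are made up, and the synchronous create call is assumed to be the standard high-level-client entry point:

    CreateSnapshotRequest request = new CreateSnapshotRequest("my_repository", "my_snapshot");
    request.waitForCompletion(true);
    request.userMetadata(Collections.singletonMap("taken_by", "nightly-cron")); // arbitrary example payload
    CreateSnapshotResponse response = highLevelClient().snapshot().create(request, RequestOptions.DEFAULT);
    assertEquals(RestStatus.OK, response.status());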
@@ -70,8 +70,7 @@ public class XPackInfoResponseTests extends
    private FeatureSetsInfo convertHlrcToInternal(org.elasticsearch.client.xpack.XPackInfoResponse.FeatureSetsInfo featureSetsInfo) {
        return featureSetsInfo != null
            ? new FeatureSetsInfo(featureSetsInfo.getFeatureSets().values().stream()
                .map(fs -> new FeatureSet(fs.name(), fs.description(), fs.available(), fs.enabled(),
                    fs.nativeCodeInfo()))
                .map(fs -> new FeatureSet(fs.name(), fs.available(), fs.enabled(), fs.nativeCodeInfo()))
                .collect(Collectors.toSet()))
            : null;
    }

@@ -169,7 +168,6 @@ public class XPackInfoResponseTests extends
    private FeatureSet randomFeatureSet() {
        return new FeatureSet(
            randomAlphaOfLength(5),
            randomBoolean() ? null : randomAlphaOfLength(20),
            randomBoolean(),
            randomBoolean(),
            randomNativeCodeInfo());
@@ -106,6 +106,7 @@ public class CcrStatsResponseTests extends AbstractResponseTestCase<CcrStatsActi
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                Collections.emptyNavigableMap(),
                randomLong(),
                randomBoolean() ? new ElasticsearchException("fatal error") : null);

@@ -190,6 +191,8 @@ public class CcrStatsResponseTests extends AbstractResponseTestCase<CcrStatsActi
            equalTo(expectedShardFollowStats.followerMappingVersion()));
        assertThat(actualShardFollowStats.getFollowerSettingsVersion(),
            equalTo(expectedShardFollowStats.followerSettingsVersion()));
        assertThat(actualShardFollowStats.getFollowerAliasesVersion(),
            equalTo(expectedShardFollowStats.followerAliasesVersion()));
        assertThat(actualShardFollowStats.getTotalReadTimeMillis(),
            equalTo(expectedShardFollowStats.totalReadTimeMillis()));
        assertThat(actualShardFollowStats.getSuccessfulReadRequests(),

@@ -93,6 +93,8 @@ public class FollowStatsResponseTests extends AbstractResponseTestCase<FollowSta
            equalTo(expectedShardFollowStats.followerMappingVersion()));
        assertThat(actualShardFollowStats.getFollowerSettingsVersion(),
            equalTo(expectedShardFollowStats.followerSettingsVersion()));
        assertThat(actualShardFollowStats.getFollowerAliasesVersion(),
            equalTo(expectedShardFollowStats.followerAliasesVersion()));
        assertThat(actualShardFollowStats.getTotalReadTimeMillis(),
            equalTo(expectedShardFollowStats.totalReadTimeMillis()));
        assertThat(actualShardFollowStats.getSuccessfulReadRequests(),
@@ -824,9 +824,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        // tag::reindex-request-conflicts
        request.setConflicts("proceed"); // <1>
        // end::reindex-request-conflicts
        // tag::reindex-request-size
        request.setSize(10); // <1>
        // end::reindex-request-size
        // tag::reindex-request-maxDocs
        request.setMaxDocs(10); // <1>
        // end::reindex-request-maxDocs
        // tag::reindex-request-sourceSize
        request.setSourceBatchSize(100); // <1>
        // end::reindex-request-sourceSize

@@ -1026,9 +1026,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        // tag::update-by-query-request-query
        request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
        // end::update-by-query-request-query
        // tag::update-by-query-request-size
        request.setSize(10); // <1>
        // end::update-by-query-request-size
        // tag::update-by-query-request-maxDocs
        request.setMaxDocs(10); // <1>
        // end::update-by-query-request-maxDocs
        // tag::update-by-query-request-scrollSize
        request.setBatchSize(100); // <1>
        // end::update-by-query-request-scrollSize

@@ -1148,9 +1148,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
        // tag::delete-by-query-request-query
        request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
        // end::delete-by-query-request-query
        // tag::delete-by-query-request-size
        request.setSize(10); // <1>
        // end::delete-by-query-request-size
        // tag::delete-by-query-request-maxDocs
        request.setMaxDocs(10); // <1>
        // end::delete-by-query-request-maxDocs
        // tag::delete-by-query-request-scrollSize
        request.setBatchSize(100); // <1>
        // end::delete-by-query-request-scrollSize
@@ -74,7 +74,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
    private List<String> transformsToClean = new ArrayList<>();

    @After
    public void cleanUpTransforms() throws IOException {
    public void cleanUpTransforms() throws Exception {
        for (String transformId : transformsToClean) {
            highLevelClient().dataFrame().stopDataFrameTransform(
                new StopDataFrameTransformRequest(transformId, Boolean.TRUE, TimeValue.timeValueSeconds(20)), RequestOptions.DEFAULT);

@@ -86,6 +86,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
        }

        transformsToClean = new ArrayList<>();
        waitForPendingTasks(adminClient());
    }

    private void createIndex(String indexName) throws IOException {
@@ -26,9 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;

@@ -62,8 +59,11 @@ import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.SyncedFlushResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.DetailAnalyzeResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
import org.elasticsearch.client.indices.GetFieldMappingsResponse;

@@ -2418,32 +2418,29 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase

        {
            // tag::analyze-builtin-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.text("Some text to analyze", "Some more text to analyze"); // <1>
            request.analyzer("english"); // <2>
            AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", // <1>
                "Some text to analyze", "Some more text to analyze"); // <2>
            // end::analyze-builtin-request
        }

        {
            // tag::analyze-custom-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.text("<b>Some text to analyze</b>");
            request.addCharFilter("html_strip"); // <1>
            request.tokenizer("standard"); // <2>
            request.addTokenFilter("lowercase"); // <3>

            Map<String, Object> stopFilter = new HashMap<>();
            stopFilter.put("type", "stop");
            stopFilter.put("stopwords", new String[]{ "to" }); // <4>
            request.addTokenFilter(stopFilter); // <5>
            stopFilter.put("stopwords", new String[]{ "to" }); // <1>
            AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // <2>
                .addCharFilter("html_strip") // <3>
                .addTokenFilter("lowercase") // <4>
                .addTokenFilter(stopFilter) // <5>
                .build("<b>Some text to analyze</b>");
            // end::analyze-custom-request
        }

        {
            // tag::analyze-custom-normalizer-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.text("<b>BaR</b>");
            request.addTokenFilter("lowercase");
            AnalyzeRequest request = AnalyzeRequest.buildCustomNormalizer()
                .addTokenFilter("lowercase")
                .build("<b>BaR</b>");
            // end::analyze-custom-normalizer-request

            // tag::analyze-request-explain

@@ -2484,10 +2481,11 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase

        {
            // tag::analyze-index-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.index("my_index"); // <1>
            request.analyzer("my_analyzer"); // <2>
            request.text("some text to analyze");
            AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer(
                "my_index", // <1>
                "my_analyzer", // <2>
                "some text to analyze"
            );
            // end::analyze-index-request

            // tag::analyze-execute-listener

@@ -2505,10 +2503,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
            // end::analyze-execute-listener

            // use a built-in analyzer in the test
            request = new AnalyzeRequest();
            request.index("my_index");
            request.field("my_field");
            request.text("some text to analyze");
            request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze");
            // Use a blocking listener in the test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

@@ -2522,19 +2517,17 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase

        {
            // tag::analyze-index-normalizer-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.index("my_index"); // <1>
            request.normalizer("my_normalizer"); // <2>
            request.text("some text to analyze");
            AnalyzeRequest request = AnalyzeRequest.withNormalizer(
                "my_index", // <1>
                "my_normalizer", // <2>
                "some text to analyze"
            );
            // end::analyze-index-normalizer-request
        }

        {
            // tag::analyze-field-request
            AnalyzeRequest request = new AnalyzeRequest();
            request.index("my_index");
            request.field("my_field");
            request.text("some text to analyze");
            AnalyzeRequest request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze");
            // end::analyze-field-request
        }
@@ -0,0 +1,69 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class AnalyzeGlobalRequestTests extends AnalyzeRequestTests {

    private static final Map<String, Object> charFilterConfig = new HashMap<>();
    static {
        charFilterConfig.put("type", "html_strip");
    }

    private static final Map<String, Object> tokenFilterConfig = new HashMap<>();
    static {
        tokenFilterConfig.put("type", "synonym");
    }

    @Override
    protected AnalyzeRequest createClientTestInstance() {
        int option = random().nextInt(3);
        switch (option) {
            case 0:
                return AnalyzeRequest.withGlobalAnalyzer("my_analyzer", "some text", "some more text");
            case 1:
                return AnalyzeRequest.buildCustomAnalyzer("my_tokenizer")
                    .addCharFilter("my_char_filter")
                    .addCharFilter(charFilterConfig)
                    .addTokenFilter("my_token_filter")
                    .addTokenFilter(tokenFilterConfig)
                    .build("some text", "some more text");
            case 2:
                return AnalyzeRequest.buildCustomNormalizer()
                    .addCharFilter("my_char_filter")
                    .addCharFilter(charFilterConfig)
                    .addTokenFilter("my_token_filter")
                    .addTokenFilter(tokenFilterConfig)
                    .build("some text", "some more text");
        }
        throw new IllegalStateException("nextInt(3) has returned a value greater than 2");
    }

    @Override
    protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException {
        return AnalyzeAction.Request.fromXContent(parser, null);
    }
}
@@ -0,0 +1,73 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class AnalyzeIndexRequestTests extends AnalyzeRequestTests {

    private static final Map<String, Object> charFilterConfig = new HashMap<>();
    static {
        charFilterConfig.put("type", "html_strip");
    }

    private static final Map<String, Object> tokenFilterConfig = new HashMap<>();
    static {
        tokenFilterConfig.put("type", "synonym");
    }

    @Override
    protected AnalyzeRequest createClientTestInstance() {
        int option = random().nextInt(5);
        switch (option) {
            case 0:
                return AnalyzeRequest.withField("index", "field", "some text", "some more text");
            case 1:
                return AnalyzeRequest.withIndexAnalyzer("index", "my_analyzer", "some text", "some more text");
            case 2:
                return AnalyzeRequest.withNormalizer("index", "my_normalizer", "text", "more text");
            case 3:
                return AnalyzeRequest.buildCustomAnalyzer("index", "my_tokenizer")
                    .addCharFilter("my_char_filter")
                    .addCharFilter(charFilterConfig)
                    .addTokenFilter("my_token_filter")
                    .addTokenFilter(tokenFilterConfig)
                    .build("some text", "some more text");
            case 4:
                return AnalyzeRequest.buildCustomNormalizer("index")
                    .addCharFilter("my_char_filter")
                    .addCharFilter(charFilterConfig)
                    .addTokenFilter("my_token_filter")
                    .addTokenFilter(tokenFilterConfig)
                    .build("some text", "some more text");
        }
        throw new IllegalStateException("nextInt(5) has returned a value greater than 4");
    }

    @Override
    protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException {
        return AnalyzeAction.Request.fromXContent(parser, "index");
    }
}
@@ -0,0 +1,54 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.AbstractRequestTestCase;

public abstract class AnalyzeRequestTests extends AbstractRequestTestCase<AnalyzeRequest, AnalyzeAction.Request> {

    @Override
    protected void assertInstances(AnalyzeAction.Request serverInstance, AnalyzeRequest clientTestInstance) {
        assertEquals(serverInstance.index(), clientTestInstance.index());
        assertArrayEquals(serverInstance.text(), clientTestInstance.text());
        assertEquals(serverInstance.analyzer(), clientTestInstance.analyzer());
        assertEquals(serverInstance.normalizer(), clientTestInstance.normalizer());
        assertEquals(serverInstance.charFilters().size(), clientTestInstance.charFilters().size());
        for (int i = 0; i < serverInstance.charFilters().size(); i++) {
            assertEquals(serverInstance.charFilters().get(i).name, clientTestInstance.charFilters().get(i).name);
            assertEquals(serverInstance.charFilters().get(i).definition, clientTestInstance.charFilters().get(i).definition);
        }
        assertEquals(serverInstance.tokenFilters().size(), clientTestInstance.tokenFilters().size());
        for (int i = 0; i < serverInstance.tokenFilters().size(); i++) {
            assertEquals(serverInstance.tokenFilters().get(i).name, clientTestInstance.tokenFilters().get(i).name);
            assertEquals(serverInstance.tokenFilters().get(i).definition, clientTestInstance.tokenFilters().get(i).definition);
        }
        if (serverInstance.tokenizer() != null) {
            assertEquals(serverInstance.tokenizer().name, clientTestInstance.tokenizer().name);
            assertEquals(serverInstance.tokenizer().definition, clientTestInstance.tokenizer().definition);
        } else {
            assertNull(clientTestInstance.tokenizer());
        }
        assertEquals(serverInstance.field(), clientTestInstance.field());
        assertEquals(serverInstance.explain(), clientTestInstance.explain());
        assertArrayEquals(serverInstance.attributes(), clientTestInstance.attributes());
    }
}
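These request tests rely on a render-then-reparse round trip: the client request is written out as XContent and read back as the server-side AnalyzeAction.Request before assertInstances compares them. A minimal sketch of that loop, assuming the client AnalyzeRequest is a ToXContentObject and that the usual ESTestCase createParser helper is available:

    AnalyzeRequest clientRequest = AnalyzeRequest.withGlobalAnalyzer("english", "round trip me");
    XContentBuilder builder = XContentFactory.jsonBuilder();
    clientRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
    try (XContentParser parser = createParser(builder)) {
        AnalyzeAction.Request serverRequest = AnalyzeAction.Request.fromXContent(parser, null);
        assertEquals("english", serverRequest.analyzer());
    }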
@ -0,0 +1,174 @@
|
|||
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.indices;

import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AnalyzeResponseTests extends AbstractResponseTestCase<AnalyzeAction.Response, AnalyzeResponse> {

    @Override
    protected AnalyzeAction.Response createServerTestInstance() {
        int tokenCount = randomIntBetween(1, 30);
        AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount];
        for (int i = 0; i < tokenCount; i++) {
            tokens[i] = randomToken();
        }
        if (randomBoolean()) {
            AnalyzeAction.CharFilteredText[] charfilters = null;
            AnalyzeAction.AnalyzeTokenList[] tokenfilters = null;
            if (randomBoolean()) {
                charfilters = new AnalyzeAction.CharFilteredText[]{
                    new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"})
                };
            }
            if (randomBoolean()) {
                tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{
                    new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens),
                    new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens)
                };
            }
            AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse(
                charfilters,
                new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens),
                tokenfilters);
            return new AnalyzeAction.Response(null, dar);
        }
        return new AnalyzeAction.Response(Arrays.asList(tokens), null);
    }

    private AnalyzeAction.AnalyzeToken randomToken() {
        String token = randomAlphaOfLengthBetween(1, 20);
        int position = randomIntBetween(0, 1000);
        int startOffset = randomIntBetween(0, 1000);
        int endOffset = randomIntBetween(0, 1000);
        int posLength = randomIntBetween(1, 5);
        String type = randomAlphaOfLengthBetween(1, 20);
        Map<String, Object> extras = new HashMap<>();
        if (randomBoolean()) {
            int entryCount = randomInt(6);
            for (int i = 0; i < entryCount; i++) {
                switch (randomInt(6)) {
                    case 0:
                    case 1:
                    case 2:
                    case 3:
                        String key = randomAlphaOfLength(5);
                        String value = randomAlphaOfLength(10);
                        extras.put(key, value);
                        break;
                    case 4:
                        String objkey = randomAlphaOfLength(5);
                        Map<String, String> obj = new HashMap<>();
                        obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
                        extras.put(objkey, obj);
                        break;
                    case 5:
                        String listkey = randomAlphaOfLength(5);
                        List<String> list = new ArrayList<>();
                        list.add(randomAlphaOfLength(4));
                        list.add(randomAlphaOfLength(6));
                        extras.put(listkey, list);
                        break;
                }
            }
        }
        return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras);
    }

    @Override
    protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException {
        return AnalyzeResponse.fromXContent(parser);
    }

    @Override
    protected void assertInstances(AnalyzeAction.Response serverTestInstance, AnalyzeResponse clientInstance) {
        if (serverTestInstance.detail() != null) {
            assertNotNull(clientInstance.detail());
            assertInstances(serverTestInstance.detail(), clientInstance.detail());
        } else {
            assertEquals(serverTestInstance.getTokens().size(), clientInstance.getTokens().size());
            for (int i = 0; i < serverTestInstance.getTokens().size(); i++) {
                assertEqualTokens(serverTestInstance.getTokens().get(i), clientInstance.getTokens().get(i));
            }
        }
    }

    private static void assertEqualTokens(AnalyzeAction.AnalyzeToken serverToken, AnalyzeResponse.AnalyzeToken clientToken) {
        assertEquals(serverToken.getTerm(), clientToken.getTerm());
        assertEquals(serverToken.getPosition(), clientToken.getPosition());
        assertEquals(serverToken.getPositionLength(), clientToken.getPositionLength());
        assertEquals(serverToken.getStartOffset(), clientToken.getStartOffset());
        assertEquals(serverToken.getEndOffset(), clientToken.getEndOffset());
        assertEquals(serverToken.getType(), clientToken.getType());
        assertEquals(serverToken.getAttributes(), clientToken.getAttributes());
    }

    private static void assertInstances(AnalyzeAction.DetailAnalyzeResponse serverResponse, DetailAnalyzeResponse clientResponse) {
        assertInstances(serverResponse.analyzer(), clientResponse.analyzer());
        assertInstances(serverResponse.tokenizer(), clientResponse.tokenizer());
        if (serverResponse.tokenfilters() == null) {
            assertNull(clientResponse.tokenfilters());
        } else {
            assertEquals(serverResponse.tokenfilters().length, clientResponse.tokenfilters().length);
            for (int i = 0; i < serverResponse.tokenfilters().length; i++) {
                assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]);
            }
        }
        if (serverResponse.charfilters() == null) {
            assertNull(clientResponse.charfilters());
        } else {
            assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length);
            for (int i = 0; i < serverResponse.charfilters().length; i++) {
                assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]);
            }
        }
    }

    private static void assertInstances(AnalyzeAction.AnalyzeTokenList serverTokens,
                                        DetailAnalyzeResponse.AnalyzeTokenList clientTokens) {
        if (serverTokens == null) {
            assertNull(clientTokens);
        } else {
            assertEquals(serverTokens.getName(), clientTokens.getName());
            assertEquals(serverTokens.getTokens().length, clientTokens.getTokens().length);
            for (int i = 0; i < serverTokens.getTokens().length; i++) {
                assertEqualTokens(serverTokens.getTokens()[i], clientTokens.getTokens()[i]);
            }
        }
    }

    private static void assertInstances(AnalyzeAction.CharFilteredText serverText, DetailAnalyzeResponse.CharFilteredText clientText) {
        assertEquals(serverText.getName(), clientText.getName());
        assertArrayEquals(serverText.getTexts(), clientText.getTexts());
    }
}
@@ -35,6 +35,7 @@ public class FindFileStructureRequestTests extends AbstractXContentTestCase<Find

    static {
        PARSER.declareInt(FindFileStructureRequest::setLinesToSample, FindFileStructureRequest.LINES_TO_SAMPLE);
        PARSER.declareInt(FindFileStructureRequest::setLineMergeSizeLimit, FindFileStructureRequest.LINE_MERGE_SIZE_LIMIT);
        PARSER.declareString((p, c) -> p.setTimeout(TimeValue.parseTimeValue(c, FindFileStructureRequest.TIMEOUT.getPreferredName())),
            FindFileStructureRequest.TIMEOUT);
        PARSER.declareString(FindFileStructureRequest::setCharset, FindFileStructureRequest.CHARSET);
@@ -72,6 +73,9 @@ public class FindFileStructureRequestTests extends AbstractXContentTestCase<Find
        if (randomBoolean()) {
            findFileStructureRequest.setLinesToSample(randomIntBetween(1000, 2000));
        }
        if (randomBoolean()) {
            findFileStructureRequest.setLineMergeSizeLimit(randomIntBetween(10000, 20000));
        }
        if (randomBoolean()) {
            findFileStructureRequest.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(10, 20)));
        }
@@ -43,6 +43,8 @@ public class FieldStatsTests extends AbstractXContentTestCase<FieldStats> {
        Double maxValue = null;
        Double meanValue = null;
        Double medianValue = null;
        String earliestTimestamp = null;
        String latestTimestamp = null;
        boolean isMetric = randomBoolean();
        if (isMetric) {
            if (randomBoolean()) {
@@ -54,6 +56,12 @@ public class FieldStatsTests extends AbstractXContentTestCase<FieldStats> {
            }
            meanValue = randomDouble();
            medianValue = randomDouble();
        } else {
            boolean isDate = randomBoolean();
            if (isDate) {
                earliestTimestamp = randomAlphaOfLength(20);
                latestTimestamp = randomAlphaOfLength(20);
            }
        }

        List<Map<String, Object>> topHits = new ArrayList<>();
@@ -68,7 +76,7 @@ public class FieldStatsTests extends AbstractXContentTestCase<FieldStats> {
            topHits.add(topHit);
        }

        return new FieldStats(count, cardinality, minValue, maxValue, meanValue, medianValue, topHits);
        return new FieldStats(count, cardinality, minValue, maxValue, meanValue, medianValue, earliestTimestamp, latestTimestamp, topHits);
    }

    @Override
@@ -44,7 +44,7 @@ dependencies {
compile "commons-codec:commons-codec:${versions.commonscodec}"
|
||||
compile "commons-logging:commons-logging:${versions.commonslogging}"
|
||||
|
||||
testCompile "org.elasticsearch.client:test:${version}"
|
||||
testCompile project(":client:test")
|
||||
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
|
||||
testCompile "junit:junit:${versions.junit}"
|
||||
testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
|
||||
|
@@ -68,7 +68,7 @@ forbiddenApisTest {
}

// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:core
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled = false

testingConventions {
@@ -134,7 +134,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
     * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK.
     */
    private static String getProtocol() {
        String version = AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty("java.version"));
        String version = AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty("java.specification.version"));
        String[] components = version.split("\\.");
        if (components.length > 0) {
            final int major = Integer.valueOf(components[0]);
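
A minimal sketch of why the hunk above switches properties; the values in the comments are illustrative, not part of the change:

// "java.version" can look like "1.8.0_202" or "12.0.1", while
// "java.specification.version" is the stable "1.8", "11", "12", ...
// so its first component cleanly identifies pre-JDK-9 runtimes.
String spec = System.getProperty("java.specification.version");
int major = Integer.valueOf(spec.split("\\.")[0]); // 1 on JDK 8, 11 on JDK 11
boolean preJdk9 = (major == 1);
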
@@ -35,14 +35,14 @@ publishing {
}

dependencies {
  compile "org.elasticsearch.client:elasticsearch-rest-client:${version}"
  compile project(":client:rest")
  compile "org.apache.httpcomponents:httpclient:${versions.httpclient}"
  compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile "commons-logging:commons-logging:${versions.commonslogging}"
  compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"

  testCompile "org.elasticsearch.client:test:${version}"
  testCompile project(":client:test")
  testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
  testCompile "junit:junit:${versions.junit}"
  testCompile "org.elasticsearch:securemock:${versions.securemock}"
@@ -68,7 +68,7 @@ dependencyLicenses {
}

// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:core
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled = false

testingConventions {
@@ -44,7 +44,7 @@ forbiddenApisTest {
}

// JarHell is part of es server, which we don't want to pull in
// TODO: Not anymore. Now in :libs:core
// TODO: Not anymore. Now in :libs:elasticsearch-core
jarHell.enabled = false

// TODO: should we have licenses for our test deps?
@@ -23,13 +23,13 @@ apply plugin: 'nebula.maven-scm'
group = 'org.elasticsearch.client'

dependencies {
  compile "org.elasticsearch:elasticsearch:${version}"
  compile "org.elasticsearch.plugin:transport-netty4-client:${version}"
  compile "org.elasticsearch.plugin:reindex-client:${version}"
  compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
  compile "org.elasticsearch.plugin:percolator-client:${version}"
  compile "org.elasticsearch.plugin:parent-join-client:${version}"
  compile "org.elasticsearch.plugin:rank-eval-client:${version}"
  compile project(":server")
  compile project(":modules:transport-netty4")
  compile project(":modules:reindex")
  compile project(":modules:lang-mustache")
  compile project(":modules:percolator")
  compile project(":modules:parent-join")
  compile project(":modules:rank-eval")
  testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
  testCompile "junit:junit:${versions.junit}"
  testCompile "org.hamcrest:hamcrest:${versions.hamcrest}"
@@ -36,13 +36,13 @@ apply plugin: 'base'
// parent to copy to the root of the distribution
ext.logsDir = new File(buildDir, 'logs-hack/logs')
task createLogsDir(type: EmptyDirTask) {
  dir "${logsDir}"
  dirMode 0755
  dir = "${logsDir}"
  dirMode = 0755
}
ext.pluginsDir = new File(buildDir, 'plugins-hack/plugins')
task createPluginsDir(type: EmptyDirTask) {
  dir "${pluginsDir}"
  dirMode 0755
  dir = "${pluginsDir}"
  dirMode = 0755
}

CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String platform, boolean oss, boolean jdk) {
@@ -241,7 +241,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
      // delay by using closures, since they have not yet been configured, so no jar task exists yet
      from { project(':server').jar }
      from { project(':server').configurations.runtime }
      from { project(':libs:plugin-classloader').jar }
      from { project(':libs:elasticsearch-plugin-classloader').jar }
      from { project(':distribution:tools:java-version-checker').jar }
      from { project(':distribution:tools:launchers').jar }
      into('tools/plugin-cli') {
@@ -378,6 +378,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
        if (details.relativePath.segments[-2] == 'bin' || details.relativePath.segments[-1] == 'jspawnhelper') {
          details.mode = 0755
        }
        if (details.name == 'src.zip') {
          details.exclude()
        }
      }
    }
  }
@@ -394,11 +397,11 @@ task run(type: RunTask) {
    setting 'xpack.graph.enabled', 'true'
    setting 'xpack.watcher.enabled', 'true'
    setting 'xpack.license.self_generated.type', 'trial'
    setupCommand 'setupTestAdmin',
            'bin/elasticsearch-users', 'useradd', 'elastic-admin', '-p', 'elastic-password', '-r', 'superuser'
  } else if (licenseType != 'basic') {
    throw new IllegalArgumentException("Unsupported self-generated license type: [" + licenseType + "]. Must be [basic] or [trial].")
  }
  setupCommand 'setupTestAdmin',
          'bin/elasticsearch-users', 'useradd', 'elastic-admin', '-p', 'elastic-password', '-r', 'superuser'
  setting 'xpack.security.enabled', 'true'
  setting 'xpack.monitoring.enabled', 'true'
  setting 'xpack.sql.enabled', 'true'
@@ -2,6 +2,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin

apply plugin: 'base'
apply plugin: 'elasticsearch.test.fixtures'
@@ -44,7 +45,12 @@ project.ext {
}

  into('config') {
    /*
     * Oss and default distribution can have different configuration, therefore we want to allow overriding the default configuration
     * by creating config files in oss or default build-context sub-modules.
     */
    from project.projectDir.toPath().resolve("src/docker/config")
    from project.projectDir.toPath().resolve(oss ? "oss-docker-build-context" : "docker-build-context").resolve("src/docker/config")
  }

  from(project.projectDir.toPath().resolve("src/docker/Dockerfile")) {
@@ -72,7 +78,10 @@ void addCopyDockerContextTask(final boolean oss) {
  }

  preProcessFixture {
    dependsOn assemble
    // don't add the tasks to build the docker images if we have no way of testing them
    if (TestFixturesPlugin.dockerComposeSupported()) {
      dependsOn assemble
    }
  }

  postProcessFixture.doLast {
@@ -0,0 +1,121 @@
status = error

# log action execution errors for easier debugging
logger.action.name = org.elasticsearch.action
logger.action.level = debug

appender.rolling.type = Console
appender.rolling.name = rolling
appender.rolling.layout.type = ESJsonLayout
appender.rolling.layout.type_name = server

rootLogger.level = info
rootLogger.appenderRef.rolling.ref = rolling

appender.deprecation_rolling.type = Console
appender.deprecation_rolling.name = deprecation_rolling
appender.deprecation_rolling.layout.type = ESJsonLayout
appender.deprecation_rolling.layout.type_name = deprecation

logger.deprecation.name = org.elasticsearch.deprecation
logger.deprecation.level = warn
logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
logger.deprecation.additivity = false

appender.index_search_slowlog_rolling.type = Console
appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
appender.index_search_slowlog_rolling.layout.type = ESJsonLayout
appender.index_search_slowlog_rolling.layout.type_name = index_search_slowlog

logger.index_search_slowlog_rolling.name = index.search.slowlog
logger.index_search_slowlog_rolling.level = trace
logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
logger.index_search_slowlog_rolling.additivity = false

appender.index_indexing_slowlog_rolling.type = Console
appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
appender.index_indexing_slowlog_rolling.layout.type = ESJsonLayout
appender.index_indexing_slowlog_rolling.layout.type_name = index_indexing_slowlog

logger.index_indexing_slowlog.name = index.indexing.slowlog.index
logger.index_indexing_slowlog.level = trace
logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
logger.index_indexing_slowlog.additivity = false

appender.audit_rolling.type = Console
appender.audit_rolling.name = audit_rolling
appender.audit_rolling.layout.type = PatternLayout
appender.audit_rolling.layout.pattern = {\
                "type": "audit", \
                "timestamp":"%d{yyyy-MM-dd'T'HH:mm:ss,SSSZ}"\
                %varsNotEmpty{, "node.name":"%enc{%map{node.name}}{JSON}"}\
                %varsNotEmpty{, "node.id":"%enc{%map{node.id}}{JSON}"}\
                %varsNotEmpty{, "host.name":"%enc{%map{host.name}}{JSON}"}\
                %varsNotEmpty{, "host.ip":"%enc{%map{host.ip}}{JSON}"}\
                %varsNotEmpty{, "event.type":"%enc{%map{event.type}}{JSON}"}\
                %varsNotEmpty{, "event.action":"%enc{%map{event.action}}{JSON}"}\
                %varsNotEmpty{, "user.name":"%enc{%map{user.name}}{JSON}"}\
                %varsNotEmpty{, "user.run_by.name":"%enc{%map{user.run_by.name}}{JSON}"}\
                %varsNotEmpty{, "user.run_as.name":"%enc{%map{user.run_as.name}}{JSON}"}\
                %varsNotEmpty{, "user.realm":"%enc{%map{user.realm}}{JSON}"}\
                %varsNotEmpty{, "user.run_by.realm":"%enc{%map{user.run_by.realm}}{JSON}"}\
                %varsNotEmpty{, "user.run_as.realm":"%enc{%map{user.run_as.realm}}{JSON}"}\
                %varsNotEmpty{, "user.roles":%map{user.roles}}\
                %varsNotEmpty{, "origin.type":"%enc{%map{origin.type}}{JSON}"}\
                %varsNotEmpty{, "origin.address":"%enc{%map{origin.address}}{JSON}"}\
                %varsNotEmpty{, "realm":"%enc{%map{realm}}{JSON}"}\
                %varsNotEmpty{, "url.path":"%enc{%map{url.path}}{JSON}"}\
                %varsNotEmpty{, "url.query":"%enc{%map{url.query}}{JSON}"}\
                %varsNotEmpty{, "request.method":"%enc{%map{request.method}}{JSON}"}\
                %varsNotEmpty{, "request.body":"%enc{%map{request.body}}{JSON}"}\
                %varsNotEmpty{, "request.id":"%enc{%map{request.id}}{JSON}"}\
                %varsNotEmpty{, "action":"%enc{%map{action}}{JSON}"}\
                %varsNotEmpty{, "request.name":"%enc{%map{request.name}}{JSON}"}\
                %varsNotEmpty{, "indices":%map{indices}}\
                %varsNotEmpty{, "opaque_id":"%enc{%map{opaque_id}}{JSON}"}\
                %varsNotEmpty{, "x_forwarded_for":"%enc{%map{x_forwarded_for}}{JSON}"}\
                %varsNotEmpty{, "transport.profile":"%enc{%map{transport.profile}}{JSON}"}\
                %varsNotEmpty{, "rule":"%enc{%map{rule}}{JSON}"}\
                %varsNotEmpty{, "event.category":"%enc{%map{event.category}}{JSON}"}\
                }%n
# "node.name" node name from the `elasticsearch.yml` settings
# "node.id" node id which should not change between cluster restarts
# "host.name" unresolved hostname of the local node
# "host.ip" the local bound ip (i.e. the ip listening for connections)
# "event.type" a received REST request is translated into one or more transport requests. This indicates which processing layer generated the event "rest" or "transport" (internal)
# "event.action" the name of the audited event, eg. "authentication_failed", "access_granted", "run_as_granted", etc.
# "user.name" the subject name as authenticated by a realm
# "user.run_by.name" the original authenticated subject name that is impersonating another one.
# "user.run_as.name" if this "event.action" is of a run_as type, this is the subject name to be impersonated as.
# "user.realm" the name of the realm that authenticated "user.name"
# "user.run_by.realm" the realm name of the impersonating subject ("user.run_by.name")
# "user.run_as.realm" if this "event.action" is of a run_as type, this is the realm name the impersonated user is looked up from
# "user.roles" the roles array of the user; these are the roles that are granting privileges
# "origin.type" it is "rest" if the event is originating (is in relation to) a REST request; possible other values are "transport" and "ip_filter"
# "origin.address" the remote address and port of the first network hop, i.e. a REST proxy or another cluster node
# "realm" name of a realm that has generated an "authentication_failed" or an "authentication_successful"; the subject is not yet authenticated
# "url.path" the URI component between the port and the query string; it is percent (URL) encoded
# "url.query" the URI component after the path and before the fragment; it is percent (URL) encoded
# "request.method" the method of the HTTP request, i.e. one of GET, POST, PUT, DELETE, OPTIONS, HEAD, PATCH, TRACE, CONNECT
# "request.body" the content of the request body entity, JSON escaped
# "request.id" a synthetic identifier for the incoming request, this is unique per incoming request, and consistent across all audit events generated by that request
# "action" an action is the most granular operation that is authorized and this identifies it in a namespaced way (internal)
# "request.name" if the event is in connection to a transport message this is the name of the request class, similar to how rest requests are identified by the url path (internal)
# "indices" the array of indices that the "action" is acting upon
# "opaque_id" opaque value conveyed by the "X-Opaque-Id" request header
# "x_forwarded_for" the addresses from the "X-Forwarded-For" request header, as a verbatim string value (not an array)
# "transport.profile" name of the transport profile in case this is a "connection_granted" or "connection_denied" event
# "rule" name of the applied rule if the "origin.type" is "ip_filter"
# "event.category" fixed value "elasticsearch-audit"

logger.xpack_security_audit_logfile.name = org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail
logger.xpack_security_audit_logfile.level = info
logger.xpack_security_audit_logfile.appenderRef.audit_rolling.ref = audit_rolling
logger.xpack_security_audit_logfile.additivity = false

logger.xmlsig.name = org.apache.xml.security.signature.XMLSignature
logger.xmlsig.level = error
logger.samlxml_decrypt.name = org.opensaml.xmlsec.encryption.support.Decrypter
logger.samlxml_decrypt.level = fatal
logger.saml2_decrypt.name = org.opensaml.saml.saml2.encryption.Decrypter
logger.saml2_decrypt.level = fatal
@@ -22,11 +22,11 @@ apply plugin: 'elasticsearch.build'
archivesBaseName = 'elasticsearch-plugin-cli'

dependencies {
  compileOnly "org.elasticsearch:elasticsearch:${version}"
  compileOnly "org.elasticsearch:elasticsearch-cli:${version}"
  compileOnly project(":server")
  compileOnly project(":libs:elasticsearch-cli")
  compile "org.bouncycastle:bcpg-jdk15on:${versions.bouncycastle}"
  compile "org.bouncycastle:bcprov-jdk15on:${versions.bouncycastle}"
  testCompile "org.elasticsearch.test:framework:${version}"
  testCompile project(":test:framework")
  testCompile 'com.google.jimfs:jimfs:1.1'
  testCompile 'com.google.guava:guava:18.0'
}
@@ -40,8 +40,8 @@ test {
  systemProperty 'tests.security.manager', 'false'
}

if (project.inFipsJvm) {
thirdPartyAudit.onlyIf {
  // FIPS JVM includes many classes from bouncycastle which count as jar hell for the third party audit,
  // rather than provide a long list of exclusions, disable the check on FIPS.
  thirdPartyAudit.enabled = false
  project.inFipsJvm == false
}
@@ -62,9 +62,9 @@ for its modifiers:
them" which looks like `// TESTRESPONSE[s/\d+/$body.$_path/]`.
|
||||
* You can't use `// TESTRESPONSE` immediately after `// TESTSETUP`. Instead,
|
||||
consider using `// TEST[continued]` or rearrange your snippets.
|
||||
* `// TESTRESPONSE[_cat]`: Add substitutions for testing `_cat` responses. Use
|
||||
this after all other substitutions so it doesn't make other substitutions
|
||||
difficult.
|
||||
* `// TESTRESPONSE[non_json]`: Add substitutions for testing responses in a
|
||||
format other than JSON. Use this after all other substitutions so it doesn't
|
||||
make other substitutions difficult.
|
||||
* `// TESTRESPONSE[skip:reason]`: Skip the assertions specified by this
|
||||
response.
|
||||
* `// TESTSETUP`: Marks this snippet as the "setup" for all other snippets in
|
||||
|
|
|
@@ -1137,3 +1137,12 @@ buildRestTests.setups['seats'] = '''
{"theatre": "Graye", "cost": 33, "row": 2, "number": 6, "sold": false}
|
||||
{"index":{"_id": "4"}}
|
||||
{"theatre": "Skyline", "cost": 20, "row": 5, "number": 2, "sold": false}'''
|
||||
buildRestTests.setups['kibana_sample_data_ecommerce'] = '''
|
||||
- do:
|
||||
indices.create:
|
||||
index: kibana_sample_data_ecommerce
|
||||
body:
|
||||
settings:
|
||||
number_of_shards: 1
|
||||
number_of_replicas: 0
|
||||
'''
|
||||
|
|
|
@@ -51,7 +51,7 @@ otherwise modify the request for matching documents.
include-tagged::{client-reindex-tests}/ReindexDocumentationIT.java[update-by-query-size]
--------------------------------------------------

You can also combine `size` with sorting to limit the documents updated:
You can also combine `maxDocs` with sorting to limit the documents updated:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
@@ -39,11 +39,11 @@ include-tagged::{doc-tests-file}[{api}-request-query]
--------------------------------------------------
<1> Only copy documents which have field `user` set to `kimchy`

It’s also possible to limit the number of processed documents by setting size.
It’s also possible to limit the number of processed documents by setting `maxDocs`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-size]
include-tagged::{doc-tests-file}[{api}-request-maxDocs]
--------------------------------------------------
<1> Only copy 10 documents

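For readers without the tested snippet at hand, a minimal sketch of such a call, assuming the `setMaxDocs` setter that this rename suggests on the bulk-by-scroll requests (index name and query are illustrative):

["source","java"]
--------------------------------------------------
UpdateByQueryRequest request = new UpdateByQueryRequest("source-index");
request.setQuery(new TermQueryBuilder("user", "kimchy")); // only matching docs
request.setMaxDocs(10); // process at most 10 documents
--------------------------------------------------
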
@@ -65,11 +65,11 @@ include-tagged::{doc-tests-file}[{api}-request-query]
--------------------------------------------------
<1> Only copy documents which have field `user` set to `kimchy`

It’s also possible to limit the number of processed documents by setting size.
It’s also possible to limit the number of processed documents by setting `maxDocs`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-size]
include-tagged::{doc-tests-file}[{api}-request-maxDocs]
--------------------------------------------------
<1> Only copy 10 documents

@@ -90,7 +90,7 @@ include-tagged::{doc-tests-file}[{api}-request-pipeline]
<1> set pipeline to `my_pipeline`

If you want a particular set of documents from the source index you’ll need to use sort. If possible, prefer a more
selective query to size and sort.
selective query to maxDocs and sort.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
@@ -40,11 +40,11 @@ include-tagged::{doc-tests-file}[{api}-request-query]
--------------------------------------------------
<1> Only copy documents which have field `user` set to `kimchy`

It’s also possible to limit the number of processed documents by setting size.
It’s also possible to limit the number of processed documents by setting `maxDocs`.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-size]
include-tagged::{doc-tests-file}[{api}-request-maxDocs]
--------------------------------------------------
<1> Only copy 10 documents

@@ -19,18 +19,18 @@ The simplest version uses a built-in analyzer:
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-builtin-request]
---------------------------------------------------
<1> The text to include. Multiple strings are treated as a multi-valued field
<2> A built-in analyzer
<1> A built-in analyzer
<2> The text to include. Multiple strings are treated as a multi-valued field

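As a rough sketch of the snippet these callouts describe, assuming the client's `AnalyzeRequest.withGlobalAnalyzer` factory (the strings are illustrative):

["source","java"]
---------------------------------------------------
AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer(
    "english",                                 // the built-in analyzer (callout 1)
    "Some text to analyze", "Some more text"); // the multi-valued text (callout 2)
---------------------------------------------------
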
You can configure a custom analyzer:
["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
include-tagged::{doc-tests-file}[{api}-custom-request]
---------------------------------------------------
<1> Configure char filters
<1> Configuration for a custom tokenfilter
<2> Configure the tokenizer
<3> Add a built-in tokenfilter
<4> Configuration for a custom tokenfilter
<3> Configure char filters
<4> Add a built-in tokenfilter
<5> Add the custom tokenfilter
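
A hypothetical sketch of what those callouts map to, assuming the client's `AnalyzeRequest.buildCustomAnalyzer` builder (all names and values are illustrative):

["source","java"]
---------------------------------------------------
Map<String, Object> stopFilter = new HashMap<>();  // configuration for a custom tokenfilter
stopFilter.put("type", "stop");
stopFilter.put("stopwords", Arrays.asList("to"));
AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // the tokenizer
    .addCharFilter("html_strip")    // a char filter
    .addTokenFilter("lowercase")    // a built-in tokenfilter
    .addTokenFilter(stopFilter)     // the custom tokenfilter
    .build("Some text to analyze");
---------------------------------------------------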

You can also build a custom normalizer, by including only charfilters and
@@ -1,5 +1,7 @@
include::painless-walkthrough.asciidoc[]

include::painless-datetime.asciidoc[]

include::painless-method-dispatch.asciidoc[]

include::painless-debugging.asciidoc[]
@@ -0,0 +1,336 @@
[[painless-datetime]]
=== Using Datetime in Painless

==== Datetime API

Datetimes in Painless use the standard Java libraries and are available through
the Painless <<painless-api-reference-shared, Shared API>>. Most of the classes
from the following Java packages are available to use in Painless scripts:

* <<painless-api-reference-shared-java-time, java.time>>
* <<painless-api-reference-shared-java-time-chrono, java.time.chrono>>
* <<painless-api-reference-shared-java-time-format, java.time.format>>
* <<painless-api-reference-shared-java-time-temporal, java.time.temporal>>
* <<painless-api-reference-shared-java-time-zone, java.time.zone>>

==== Datetime Representation

Datetimes in Painless are most commonly represented as a numeric value, a
string value, or a complex value.

numeric:: a datetime representation as a number from a starting offset called
an epoch; in Painless this is typically a <<primitive-types, long>> as
milliseconds since an epoch of 1970-01-01 00:00:00 Zulu Time
string:: a datetime representation as a sequence of characters defined by
a standard format or a custom format; in Painless this is typically a
<<string-type, String>> of the standard format
https://en.wikipedia.org/wiki/ISO_8601[ISO 8601]
complex:: a datetime representation as a complex type
(<<reference-types, object>>) that abstracts away internal details of how the
datetime is stored and often provides utilities for modification and
comparison; in Painless this is typically a
<<painless-api-reference-shared-ZonedDateTime>>

Switching between different representations of datetimes is often necessary to
achieve a script's objective(s). A typical pattern in a script is to switch a
numeric or string datetime to a complex datetime, modify or compare the complex
datetime, and then switch it back to a numeric or string datetime for storage
or to return a result.
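
As a minimal sketch of that pattern (the values are illustrative):

[source,Painless]
----
long milliSinceEpoch = 434931330000L;                       // numeric datetime
ZonedDateTime zdt = ZonedDateTime.ofInstant(
    Instant.ofEpochMilli(milliSinceEpoch), ZoneId.of('Z')); // switch to complex
ZonedDateTime updatedZdt = zdt.plusSeconds(30);             // modify the complex datetime
long updatedMilliSinceEpoch = updatedZdt.toInstant().toEpochMilli(); // back to numeric
----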

==== Datetime Parsing and Formatting

Datetime parsing is a switch from a string datetime to a complex datetime, and
datetime formatting is a switch from a complex datetime to a string datetime.

A <<painless-api-reference-shared-DateTimeFormatter, DateTimeFormatter>> is a
complex type (<<reference-types, object>>) that defines the allowed sequence
of characters for a string datetime. Datetime parsing and formatting often
requires a DateTimeFormatter. For more information about how to use a
DateTimeFormatter see the
{java11-javadoc}/java.base/java/time/format/DateTimeFormatter.html[Java documentation].

===== Datetime Parsing Examples

* parse from milliseconds
+
[source,Painless]
----
String milliSinceEpochString = "434931330000";
long milliSinceEpoch = Long.parseLong(milliSinceEpochString);
Instant instant = Instant.ofEpochMilli(milliSinceEpoch);
ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));
----
+
* parse from ISO 8601
+
[source,Painless]
----
String datetime = '1983-10-13T22:15:30Z';
ZonedDateTime zdt = ZonedDateTime.parse(datetime);
----
Note the parse method uses ISO 8601 by default.
+
* parse from RFC 1123
+
[source,Painless]
----
String datetime = 'Thu, 13 Oct 1983 22:15:30 GMT';
ZonedDateTime zdt = ZonedDateTime.parse(datetime,
        DateTimeFormatter.RFC_1123_DATE_TIME);
----
Note the use of a built-in DateTimeFormatter.
+
* parse from a custom format
+
[source,Painless]
----
String datetime = 'custom y 1983 m 10 d 13 22:15:30 Z';
DateTimeFormatter dtf = DateTimeFormatter.ofPattern(
        "'custom' 'y' yyyy 'm' MM 'd' dd HH:mm:ss VV");
ZonedDateTime zdt = ZonedDateTime.parse(datetime, dtf);
----
Note the use of a custom DateTimeFormatter.

===== Datetime Formatting Examples

* format to ISO 8601
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
String datetime = zdt.format(DateTimeFormatter.ISO_INSTANT);
----
Note the use of a built-in DateTimeFormatter.
+
* format to a custom format
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
DateTimeFormatter dtf = DateTimeFormatter.ofPattern(
        "'date:' yyyy/MM/dd 'time:' HH:mm:ss");
String datetime = zdt.format(dtf);
----
Note the use of a custom DateTimeFormatter.

==== Datetime Conversion

Datetime conversion is a switch from a numeric datetime to a complex datetime
and vice versa.

===== Datetime Conversion Examples

* convert from milliseconds
+
[source,Painless]
----
long milliSinceEpoch = 434931330000L;
Instant instant = Instant.ofEpochMilli(milliSinceEpoch);
ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.of('Z'));
----
+
* convert to milliseconds
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
long milliSinceEpoch = zdt.toInstant().toEpochMilli();
----

==== Datetime Pieces

Datetime representations often contain the data to extract individual datetime
pieces such as year, hour, timezone, etc. Use individual pieces of a datetime
to create a complex datetime, and use a complex datetime to extract individual
pieces.

===== Datetime Pieces Examples

* create a complex datetime from pieces
+
[source,Painless]
----
int year = 1983;
int month = 10;
int day = 13;
int hour = 22;
int minutes = 15;
int seconds = 30;
int nanos = 0;
ZonedDateTime zdt = ZonedDateTime.of(
    year, month, day, hour, minutes, seconds, nanos, ZoneId.of('Z'));
----
+
* extract pieces from a complex datetime
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 100, ZoneId.of('Z'));
int year = zdt.getYear();
int month = zdt.getMonthValue();
int day = zdt.getDayOfMonth();
int hour = zdt.getHour();
int minutes = zdt.getMinute();
int seconds = zdt.getSecond();
int nanos = zdt.getNano();
----

==== Datetime Modification

Use either a numeric datetime or a complex datetime to do modification such as
adding several seconds to a datetime or subtracting several days from a
datetime. Use standard <<painless-operators-numeric, numeric operators>> to
modify a numeric datetime. Use
<<painless-api-reference-shared-ZonedDateTime, methods>> (or fields) to modify
a complex datetime. Note many complex datetimes are immutable so upon
modification a new complex datetime is created that requires
<<variable-assignment, assignment>> or immediate use.

===== Datetime Modification Examples

* Subtract three seconds from a numeric datetime in milliseconds
+
[source,Painless]
----
long milliSinceEpoch = 434931330000L;
milliSinceEpoch = milliSinceEpoch - 1000L*3L;
----
+
* Add three days to a complex datetime
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime updatedZdt = zdt.plusDays(3);
----
+
* Subtract 125 minutes from a complex datetime
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime updatedZdt = zdt.minusMinutes(125);
----
+
* Set the year on a complex datetime
+
[source,Painless]
----
ZonedDateTime zdt =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime updatedZdt = zdt.withYear(1976);
----

==== Datetime Difference (Elapsed Time)

Use either two numeric datetimes or two complex datetimes to calculate the
difference (elapsed time) between two different datetimes. Use
<<subtraction-operator, subtraction>> to calculate the difference
between two numeric datetimes of the same time unit such as milliseconds. For
complex datetimes there is often a method or another complex type
(<<reference-types, object>>) available to calculate the difference. Use
<<painless-api-reference-shared-ChronoUnit, ChronoUnit>>
to calculate the difference between two complex datetimes if supported.

===== Elapsed Time Examples

* Difference in milliseconds between two numeric datetimes
+
[source,Painless]
----
long startTimestamp = 434931327000L;
long endTimestamp = 434931330000L;
long differenceInMillis = endTimestamp - startTimestamp;
----
+
* Difference in milliseconds between two complex datetimes
+
[source,Painless]
----
ZonedDateTime zdt1 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));
ZonedDateTime zdt2 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 35, 0, ZoneId.of('Z'));
long differenceInMillis = ChronoUnit.MILLIS.between(zdt1, zdt2);
----
+
* Difference in days between two complex datetimes
+
[source,Painless]
----
ZonedDateTime zdt1 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 11000000, ZoneId.of('Z'));
ZonedDateTime zdt2 =
    ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));
long differenceInDays = ChronoUnit.DAYS.between(zdt1, zdt2);
----

==== Datetime Comparison

Use either two numeric datetimes or two complex datetimes to do a datetime
comparison. Use standard <<painless-operators-boolean, comparison operators>>
to compare two numeric datetimes of the same time unit such as milliseconds.
For complex datetimes there is often a method or another complex type
(<<reference-types, object>>) available to do the comparison.

===== Datetime Comparison Examples

* Greater than comparison of two numeric datetimes in milliseconds
+
[source,Painless]
----
long timestamp1 = 434931327000L;
long timestamp2 = 434931330000L;

if (timestamp1 > timestamp2) {
    // handle condition
}
----
+
* Equality comparison of two complex datetimes
+
[source,Painless]
----
ZonedDateTime zdt1 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime zdt2 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));

if (zdt1.equals(zdt2)) {
    // handle condition
}
----
+
* Less than comparison of two complex datetimes
+
[source,Painless]
----
ZonedDateTime zdt1 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime zdt2 =
    ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));

if (zdt1.isBefore(zdt2)) {
    // handle condition
}
----
+
* Greater than comparison of two complex datetimes
+
[source,Painless]
----
ZonedDateTime zdt1 =
    ZonedDateTime.of(1983, 10, 13, 22, 15, 30, 0, ZoneId.of('Z'));
ZonedDateTime zdt2 =
    ZonedDateTime.of(1983, 10, 17, 22, 15, 35, 0, ZoneId.of('Z'));

if (zdt1.isAfter(zdt2)) {
    // handle condition
}
----