Merge remote-tracking branch 'es/7.x' into enrich-7.x

This commit is contained in:
Martijn van Groningen 2019-10-07 10:07:56 +02:00
commit f2f2304c75
No known key found for this signature in database
GPG Key ID: AB236F4FCF2AF12A
830 changed files with 25507 additions and 8249 deletions

View File

@ -43,5 +43,5 @@ BWC_VERSION:
- "7.3.0"
- "7.3.1"
- "7.3.2"
- "7.3.3"
- "7.4.0"
- "7.4.1"

View File

@ -10,14 +10,18 @@ initscript {
}
}
['VAULT_ADDR', 'VAULT_ROLE_ID', 'VAULT_SECRET_ID'].each {
if (System.env."$it" == null) {
throw new GradleException("$it must be set!")
boolean USE_ARTIFACTORY=false
}
if (System.getenv('VAULT_ADDR') == null) {
throw new GradleException("You must set the VAULT_ADDR environment variable to use this init script.")
}
final String vaultToken = new Vault(
if (System.getenv('VAULT_ROLE_ID') == null && System.getenv('VAULT_SECRET_ID') == null && System.getenv('VAULT_TOKEN') == null) {
throw new GradleException("You must set either the VAULT_ROLE_ID and VAULT_SECRET_ID environment variables, " +
"or the VAULT_TOKEN environment variable to use this init script.")
}
final String vaultToken = System.getenv('VAULT_TOKEN') ?: new Vault(
new VaultConfig()
.address(System.env.VAULT_ADDR)
.engineVersion(1)
@ -37,39 +41,44 @@ final Vault vault = new Vault(
)
.withRetries(5, 1000)
final Map<String,String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
if (USE_ARTIFACTORY) {
final Map<String,String> artifactoryCredentials = vault.logical()
.read("secret/elasticsearch-ci/artifactory.elstc.co")
.getData();
logger.info("Using elastic artifactory repos")
Closure configCache = {
return {
name "artifactory-gradle-release"
url "https://artifactory.elstc.co/artifactory/gradle-release"
credentials {
username artifactoryCredentials.get("username")
password artifactoryCredentials.get("token")
}
}
}
}
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
}
projectsLoaded {
allprojects {
buildscript {
settingsEvaluated { settings ->
settings.pluginManagement {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
projectsLoaded {
allprojects {
buildscript {
repositories {
maven configCache()
}
}
repositories {
maven configCache()
}
}
}
}
projectsLoaded {
rootProject {
project.pluginManager.withPlugin('com.gradle.build-scan') {
buildScan.server = 'https://gradle-enterprise.elastic.co'
@ -77,6 +86,7 @@ projectsLoaded {
}
}
final String buildCacheUrl = System.getProperty('org.elasticsearch.build.cache.url')
final boolean buildCachePush = Boolean.valueOf(System.getProperty('org.elasticsearch.build.cache.push', 'false'))

36
.ci/os.ps1 Normal file
View File

@ -0,0 +1,36 @@
# CI driver for Windows packaging tests: self-elevates, installs the shared
# Gradle init script, points the build at the CI-provisioned JDKs, then runs
# the destructive distro tests.
If (-NOT ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator))
{
    # Relaunch as an elevated process:
    Start-Process powershell.exe "-File",('"{0}"' -f $MyInvocation.MyCommand.Path) -Verb RunAs
    exit
}

# CI configures these, uncomment if running manually
#
# $env:ES_BUILD_JAVA="java12"
#$env:ES_RUNTIME_JAVA="java11"

$ErrorActionPreference="Stop"

# Reinstall the shared Gradle init script fresh so the run picks up the latest version.
$gradleInit = "C:\Users\$env:username\.gradle\init.d\"
echo "Remove $gradleInit"
Remove-Item -Recurse -Force $gradleInit -ErrorAction Ignore
New-Item -ItemType directory -Path $gradleInit
echo "Copy .ci/init.gradle to $gradleInit"
Copy-Item .ci/init.gradle -Destination $gradleInit

# Clear any machine-wide JAVA_HOME and select the CI-provisioned JDKs instead.
[Environment]::SetEnvironmentVariable("JAVA_HOME", $null, "Machine")
$env:PATH="C:\Users\jenkins\.java\$env:ES_BUILD_JAVA\bin\;$env:PATH"
$env:JAVA_HOME=$null
$env:SYSTEM_JAVA_HOME="C:\Users\jenkins\.java\$env:ES_RUNTIME_JAVA"

# Start from a clean \tmp.
Remove-Item -Recurse -Force \tmp -ErrorAction Ignore
New-Item -ItemType directory -Path \tmp

$ErrorActionPreference="Continue"
# TODO: remove the task exclusions once dependencies are set correctly and these don't run for Windows or building the deb on windows is fixed
# NOTE: no trailing backtick after the last exclusion — a continuation there
# would turn the `exit` below into extra arguments to gradlew.bat.
& .\gradlew.bat -g "C:\Users\$env:username\.gradle" --parallel --scan --console=plain destructiveDistroTest `
  -x :distribution:packages:buildOssDeb `
  -x :distribution:packages:buildDeb `
  -x :distribution:packages:buildOssRpm `
  -x :distribution:packages:buildRpm

# $? is a boolean and `exit $true` maps to exit code 1, which would invert
# success/failure; $LastExitCode is the native exit code of gradlew.bat.
exit $LastExitCode

68
.ci/os.sh Executable file
View File

@ -0,0 +1,68 @@
#!/bin/bash

# CI driver for Linux packaging tests: prepares the VM-like environment the
# bats/java packaging tests expect, then runs the destructive packaging tests.

# opensuse 15 has a missing dep for systemd
if which zypper > /dev/null ; then
    sudo zypper install -y insserv-compat
fi

# Required by bats
sudo touch /etc/is_vagrant_vm
sudo useradd vagrant

# Fail fast from here on; the steps above are allowed to fail
# (e.g. the vagrant user may already exist).
set -e

. .ci/java-versions.properties
RUNTIME_JAVA_HOME=$HOME/.java/$ES_RUNTIME_JAVA
BUILD_JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA

# Reinstall the shared Gradle init script fresh so the run picks up the latest version.
rm -Rfv "$HOME/.gradle/init.d/" && mkdir -p "$HOME/.gradle/init.d"
cp -v .ci/init.gradle "$HOME/.gradle/init.d"

unset JAVA_HOME

if ! [ -e "/usr/bin/bats" ] ; then
    git clone https://github.com/sstephenson/bats /tmp/bats
    sudo /tmp/bats/install.sh /usr
fi

if [ -f "/etc/os-release" ] ; then
    cat /etc/os-release
    . /etc/os-release
    if [[ "$ID" == "debian" || "$ID_LIKE" == "debian" ]] ; then
        # FIXME: The base image should not have rpm installed
        sudo rm -Rf /usr/bin/rpm
    fi
else
    cat /etc/issue || true
fi

# Let the packaging tests see these variables through sudo.
sudo bash -c 'cat > /etc/sudoers.d/elasticsearch_vars' << SUDOERS_VARS
Defaults env_keep += "ZIP"
Defaults env_keep += "TAR"
Defaults env_keep += "RPM"
Defaults env_keep += "DEB"
Defaults env_keep += "PACKAGING_ARCHIVES"
Defaults env_keep += "PACKAGING_TESTS"
Defaults env_keep += "BATS_UTILS"
Defaults env_keep += "BATS_TESTS"
Defaults env_keep += "SYSTEM_JAVA_HOME"
Defaults env_keep += "JAVA_HOME"
SUDOERS_VARS
sudo chmod 0440 /etc/sudoers.d/elasticsearch_vars

# Bats tests still use this location
sudo rm -Rf /elasticsearch
sudo mkdir -p /elasticsearch/qa/ && sudo chown jenkins /elasticsearch/qa/ && ln -s "$PWD/qa/vagrant" /elasticsearch/qa/

# sudo sets its own PATH thus we use env to override that and call sudo another time so we keep the secure root PATH
# run with --continue to run both bats and java tests even if one fails
# be explicit about Gradle home dir so we use the same even with sudo
# "$@" (quoted) forwards caller arguments without word-splitting them.
sudo -E env \
    PATH=$BUILD_JAVA_HOME/bin:`sudo bash -c 'echo -n $PATH'` \
    RUNTIME_JAVA_HOME=`readlink -f -n $RUNTIME_JAVA_HOME` \
    --unset=JAVA_HOME \
    SYSTEM_JAVA_HOME=`readlink -f -n $RUNTIME_JAVA_HOME` \
    ./gradlew -g "$HOME/.gradle" --scan --parallel "$@" --continue destructivePackagingTest

362
.eclipseformat.xml Normal file
View File

@ -0,0 +1,362 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="17">
<profile kind="CodeFormatterProfile" name="Elasticsearch" version="17">
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_with_spaces" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="49"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_logical_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="140"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_method_body_on_one_line" value="one_line_if_empty"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_additive_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_shift_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_lambda_body" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_parameters" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_loops" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_relational_operator" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_additive_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_module_statements" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_additive_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_conditional_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_shift_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines" value="2147483647"/>
<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_code_block_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="49"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_assignment_operator" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration" value="separate_lines_if_wrapped"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line" value="one_line_always"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_type_arguments" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_logical_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_relational_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_tag_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="48"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_string_concatenation" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_logical_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration" value="common_lines"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_shift_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line" value="one_line_never"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_additive_operator" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_relational_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_before_string_concatenation" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="140"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
</profile>
</profiles>

View File

@ -109,17 +109,16 @@ and `JAVA11_HOME` available so that the tests can pass.
Elasticsearch uses the Gradle wrapper for its build. You can execute Gradle
using the wrapper via the `gradlew` script in the root of the repository.
We support development in the Eclipse and IntelliJ IDEs. For Eclipse, the
minimum version that we support is [Eclipse Oxygen][eclipse] (version 4.7). For
IntelliJ, the minimum version that we support is [IntelliJ 2017.2][intellij].
We support development in the Eclipse and IntelliJ IDEs.
For Eclipse, the minimum version that we support is [4.13][eclipse].
For IntelliJ, the minimum version that we support is [IntelliJ 2017.2][intellij].
### Configuring IDEs And Running Tests
Eclipse users can automatically configure their IDE: `./gradlew eclipse`
then `File: Import: Existing Projects into Workspace`. Select the
option `Search for nested projects`. Additionally you will want to
ensure that Eclipse is using 2048m of heap by modifying `eclipse.ini`
accordingly to avoid GC overhead errors.
then `File: Import: Gradle : Existing Gradle Project`.
Additionally you will want to ensure that Eclipse is using 2048m of heap by modifying
`eclipse.ini` accordingly to avoid GC overhead and OOM errors.
IntelliJ users can automatically configure their IDE: `./gradlew idea`
then `File->New Project From Existing Sources`. Point to the root of
@ -155,19 +154,68 @@ For Eclipse, go to `Preferences->Java->Installed JREs` and add `-ea` to
### Java Language Formatting Guidelines
Java files in the Elasticsearch codebase are formatted with the Eclipse JDT
formatter, using the [Spotless
Gradle](https://github.com/diffplug/spotless/tree/master/plugin-gradle)
plugin. This plugin is configured on a project-by-project basis, via
`build.gradle` in the root of the repository. So long as at least one
project is configured, the formatting check can be run explicitly with:
./gradlew spotlessJavaCheck
The code can be formatted with:
./gradlew spotlessApply
These tasks can also be run for specific subprojects, e.g.
./gradlew server:spotlessJavaCheck
Please follow these formatting guidelines:
* Java indent is 4 spaces
* Line width is 140 characters
* Lines of code surrounded by `// tag` and `// end` comments are included in the
documentation and should only be 76 characters wide not counting
leading indentation
* The rest is left to Java coding standards
* Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do.
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE:
* Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are two boxes labeled "`Number of (static )? imports needed for .*`". Set their values to 99999 or some other absurdly high value.
* IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`. There are two configuration options: `Class count to use import with '*'` and `Names count to use static import with '*'`. Set their values to 99999 or some other absurdly high value.
* Don't worry too much about import order. Try not to change it but don't worry about fighting your IDE to stop it from doing so.
* Lines of code surrounded by `// tag` and `// end` comments are included
in the documentation and should only be 76 characters wide not counting
leading indentation
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause
the build to fail. This can be done automatically by your IDE:
* Eclipse: `Preferences->Java->Code Style->Organize Imports`. There are
two boxes labeled "`Number of (static )? imports needed for .*`". Set
their values to 99999 or some other absurdly high value.
* IntelliJ: `Preferences/Settings->Editor->Code Style->Java->Imports`.
There are two configuration options: `Class count to use import with
'*'` and `Names count to use static import with '*'`. Set their values
to 99999 or some other absurdly high value.
#### Editor / IDE Support
Eclipse IDEs can import the file [elasticsearch.eclipseformat.xml]
directly.
IntelliJ IDEs can
[import](https://blog.jetbrains.com/idea/2014/01/intellij-idea-13-importing-code-formatter-settings-from-eclipse/)
the same settings file, and / or use the [Eclipse Code
Formatter](https://plugins.jetbrains.com/plugin/6546-eclipse-code-formatter)
plugin.
You can also tell Spotless to [format a specific
file](https://github.com/diffplug/spotless/tree/master/plugin-gradle#can-i-apply-spotless-to-specific-files)
from the command line.
#### Formatting failures
Sometimes Spotless will report a "misbehaving rule which can't make up its
mind" and will recommend enabling the `paddedCell()` setting. If you
enable this setting and run the format check again,
Spotless will write files to
`$PROJECT/build/spotless-diagnose-java/` to aid diagnosis. It writes
different copies of the formatted files, so that you can see how they
differ and infer what is the problem.
The `paddedCell()` option is disabled for normal operation in order to
detect any misbehaviour. You can enable the option from the command line
by running Gradle with `-Dspotless.paddedcell`.
### License Headers
@ -389,6 +437,6 @@ Finally, we require that you run `./gradlew check` before submitting a
non-documentation contribution. This is mentioned above, but it is worth
repeating in this section because it has come up in this context.
[eclipse]: http://www.eclipse.org/community/eclipse_newsletter/2017/june/
[eclipse]: https://download.eclipse.org/eclipse/downloads/drops4/R-4.13-201909161045/
[intellij]: https://blog.jetbrains.com/idea/2017/07/intellij-idea-2017-2-is-here-smart-sleek-and-snappy/
[shadow-plugin]: https://github.com/johnrengelman/shadow

View File

@ -35,6 +35,7 @@ plugins {
id 'com.gradle.build-scan' version '2.4'
id 'lifecycle-base'
id 'elasticsearch.global-build-info'
id "com.diffplug.gradle.spotless" version "3.24.2" apply false
}
apply plugin: 'nebula.info-scm'
@ -98,6 +99,34 @@ subprojects {
plugins.withType(BuildPlugin).whenPluginAdded {
project.licenseFile = project.rootProject.file('licenses/APACHE-LICENSE-2.0.txt')
project.noticeFile = project.rootProject.file('NOTICE.txt')
// Projects that should be formatted and checked with Spotless are
// listed here, by project path. Once the number of formatted projects
// is greater than the number of unformatted projects, this can be
// switched to an exclude list, and eventually removed completely.
def projectPathsToFormat = [
// ':build-tools'
]
if (projectPathsToFormat.contains(project.path)) {
project.apply plugin: "com.diffplug.gradle.spotless"
spotless {
java {
removeUnusedImports()
eclipse().configFile rootProject.file('.eclipseformat.xml')
trimTrailingWhitespace()
// See CONTRIBUTING.md for details of when to enable this.
if (System.getProperty('spotless.paddedcell') != null) {
paddedCell()
}
}
}
precommit.dependsOn 'spotlessJavaCheck'
}
}
}
@ -355,29 +384,19 @@ allprojects {
}
plugins.withType(JavaBasePlugin) {
File eclipseBuild = project.file('build-eclipse')
eclipse.classpath.defaultOutputDir = eclipseBuild
if (isEclipse) {
// set this so generated dirs will be relative to eclipse build
project.buildDir = eclipseBuild
// Work around https://docs.gradle.org/current/userguide/java_gradle_plugin.html confusing Eclipse by the metadata
// it adds to the classpath
project.file("$buildDir/pluginUnderTestMetadata").mkdirs()
}
eclipse.classpath.defaultOutputDir = file('build-eclipse')
eclipse.classpath.file.whenMerged { classpath ->
// give each source folder a unique corresponding output folder
int i = 0;
classpath.entries.findAll { it instanceof SourceFolder }.each { folder ->
i++;
// this is *NOT* a path or a file.
folder.output = "build-eclipse/" + i
}
}
}
File licenseHeaderFile;
String prefix = ':x-pack';
File licenseHeaderFile
String prefix = ':x-pack'
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
prefix = prefix.replace(':', '_')
}

View File

@ -16,9 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
import org.gradle.util.GradleVersion
import java.util.regex.Matcher
import org.gradle.util.GradleVersion
plugins {
id 'java-gradle-plugin'
@ -109,8 +108,9 @@ dependencies {
compile 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3'
compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4'
compile 'com.netflix.nebula:gradle-info-plugin:5.1.0'
compile 'org.eclipse.jgit:org.eclipse.jgit:5.5.0.201909110433-r'
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'org.apache.rat:apache-rat:0.11'
compile "org.elasticsearch:jna:4.5.1"
@ -120,6 +120,7 @@ dependencies {
testCompile "junit:junit:${props.getProperty('junit')}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
testCompile 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2'
testCompile 'org.mockito:mockito-core:1.9.5'
minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}"
minimumRuntimeCompile localGroovy()
minimumRuntimeCompile gradleApi()
@ -194,40 +195,6 @@ if (project != rootProject) {
}
}
String localDownloads = "${rootProject.buildDir}/local-downloads"
task setupLocalDownloads(type:Copy) {
from configurations.distribution
into localDownloads
}
task integTest(type: Test) {
// integration test requires the local testing repo for example plugin builds
dependsOn project.rootProject.allprojects.collect {
it.tasks.matching { it.name == 'publishNebulaPublicationToTestRepository'}
}
dependsOn setupLocalDownloads
exclude "**/*Tests.class"
inputs.dir(file("src/testKit"))
// tell BuildExamplePluginsIT where to find the example plugins
systemProperty (
'test.build-tools.plugin.examples',
files(
project(':example-plugins').subprojects.collect { it.projectDir }
).asPath,
)
systemProperty 'test.local-test-repo-path', "${rootProject.buildDir}/local-test-repo"
systemProperty 'test.local-test-downloads-path', localDownloads
systemProperty 'test.version_under_test', version
Matcher isLuceneSnapshot = (/\w+-snapshot-([a-z0-9]+)/ =~ versions.lucene)
if (isLuceneSnapshot) {
systemProperty 'test.lucene-snapshot-revision', isLuceneSnapshot[0][1]
}
maxParallelForks System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel.toString()) as Integer
// These tests run Gradle which doesn't have FIPS support
onlyIf { project.inFipsJvm == false }
}
check.dependsOn(integTest)
// TODO: re-enable once randomizedtesting gradle code is published and removed from here
licenseHeaders.enabled = false
@ -250,6 +217,14 @@ if (project != rootProject) {
}
}
task integTest(type: Test) {
inputs.dir(file("src/testKit")).withPropertyName("testkit dir").withPathSensitivity(PathSensitivity.RELATIVE)
systemProperty 'test.version_under_test', version
onlyIf { project.inFipsJvm == false }
maxParallelForks = System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel.toString()) as Integer
}
check.dependsOn(integTest)
/*
* We already configure publication and we don't need or want this one that
* comes from the java-gradle-plugin.

View File

@ -1,2 +1 @@
include 'reaper'
include 'symbolic-link-preserving-tar'

View File

@ -86,7 +86,6 @@ import java.nio.file.Files
import java.util.regex.Matcher
import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure
/**
* Encapsulates build configuration for elasticsearch projects.
*/
@ -913,6 +912,11 @@ class BuildPlugin implements Plugin<Project> {
logging.exceptionFormat = 'full'
}
if (OS.current().equals(OS.WINDOWS) && System.getProperty('tests.timeoutSuite') == null) {
// override the suite timeout to 30 mins for windows, because it has the most inefficient filesystem known to man
test.systemProperty 'tests.timeoutSuite', '1800000!'
}
project.plugins.withType(ShadowPlugin).whenPluginAdded {
// Test against a shadow jar if we made one
test.classpath -= project.tasks.getByName('compileJava').outputs.files

View File

@ -20,20 +20,16 @@
package org.elasticsearch.gradle
import org.elasticsearch.gradle.precommit.DependencyLicensesTask
import org.gradle.api.DefaultTask
import org.gradle.api.artifacts.Configuration
import org.gradle.api.artifacts.Dependency
import org.gradle.api.artifacts.DependencyResolutionListener
import org.gradle.api.artifacts.DependencySet
import org.gradle.api.internal.ConventionTask
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputDirectory
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.TaskAction
import java.util.regex.Matcher
import java.util.regex.Pattern
/**
* A task to gather information about the dependencies and export them into a csv file.
*
@ -46,31 +42,31 @@ import java.util.regex.Pattern
* </ul>
*
*/
public class DependenciesInfoTask extends ConventionTask {
class DependenciesInfoTask extends ConventionTask {
/** Dependencies to gather information from. */
@Input
public Configuration runtimeConfiguration
@InputFiles
Configuration runtimeConfiguration
/** We subtract compile-only dependencies. */
@Input
public Configuration compileOnlyConfiguration
private LinkedHashMap<String, String> mappings
@InputFiles
Configuration compileOnlyConfiguration
/** Directory to read license files */
@InputDirectory
public File licensesDir = new File(project.projectDir, 'licenses')
File licensesDir = new File(project.projectDir, 'licenses')
@OutputFile
File outputFile = new File(project.buildDir, "reports/dependencies/dependencies.csv")
public DependenciesInfoTask() {
private LinkedHashMap<String, String> mappings
DependenciesInfoTask() {
description = 'Create a CSV file with dependencies information.'
}
@TaskAction
public void generateDependenciesInfo() {
void generateDependenciesInfo() {
final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies()
// we have to resolve the transitive dependencies and create a group:artifactId:version map

View File

@ -31,7 +31,7 @@ import java.nio.file.Path
/**
* Generates REST tests for each snippet marked // TEST.
*/
public class RestTestsFromSnippetsTask extends SnippetsTask {
class RestTestsFromSnippetsTask extends SnippetsTask {
/**
* These languages aren't supported by the syntax highlighter so we
* shouldn't use them.
@ -58,7 +58,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
@OutputDirectory
File testRoot = project.file('build/rest')
public RestTestsFromSnippetsTask() {
RestTestsFromSnippetsTask() {
project.afterEvaluate {
// Wait to set this so testRoot can be customized
project.sourceSets.test.output.dir(testRoot, builtBy: this)

View File

@ -28,6 +28,7 @@ import org.gradle.api.InvalidUserDataException
import org.gradle.api.file.ConfigurableFileTree
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.Internal
import org.gradle.api.tasks.TaskAction
import java.nio.file.Path
@ -36,7 +37,7 @@ import java.util.regex.Matcher
/**
* A task which will run a closure on each snippet in the documentation.
*/
public class SnippetsTask extends DefaultTask {
class SnippetsTask extends DefaultTask {
private static final String SCHAR = /(?:\\\/|[^\/])/
private static final String SUBSTITUTION = /s\/($SCHAR+)\/($SCHAR*)\//
private static final String CATCH = /catch:\s*((?:\/[^\/]+\/)|[^ \]]+)/
@ -51,6 +52,7 @@ public class SnippetsTask extends DefaultTask {
* Action to take on each snippet. Called with a single parameter, an
* instance of Snippet.
*/
@Internal
Closure perSnippet
/**
@ -73,7 +75,7 @@ public class SnippetsTask extends DefaultTask {
Map<String, String> defaultSubstitutions = [:]
@TaskAction
public void executeTask() {
void executeTask() {
/*
* Walks each line of each file, building snippets as it encounters
* the lines that make up the snippet.

View File

@ -62,14 +62,11 @@ class PluginBuildPlugin implements Plugin<Project> {
project.afterEvaluate {
boolean isXPackModule = project.path.startsWith(':x-pack:plugin')
boolean isModule = project.path.startsWith(':modules:') || isXPackModule
String name = extension.name
PluginPropertiesExtension extension1 = project.getExtensions().getByType(PluginPropertiesExtension.class)
String name = extension1.name
project.archivesBaseName = name
// set the project description so it will be picked up by publishing
project.description = extension.description
configurePublishing(project, extension)
project.description = extension1.description
configurePublishing(project, extension1)
if (project.plugins.hasPlugin(TestClustersPlugin.class) == false) {
project.integTestCluster.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
@ -99,7 +96,29 @@ class PluginBuildPlugin implements Plugin<Project> {
}
}
}
if (extension1.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension1.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension1.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
Copy buildProperties = project.tasks.getByName('pluginProperties')
Map<String, String> properties = [
'name' : extension1.name,
'description' : extension1.description,
'version' : extension1.version,
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
'javaVersion' : project.targetCompatibility as String,
'classname' : extension1.classname,
'extendedPlugins' : extension1.extendedPlugins.join(','),
'hasNativeController' : extension1.hasNativeController,
'requiresKeystore' : extension1.requiresKeystore
]
buildProperties.expand(properties)
buildProperties.inputs.properties(properties)
project.tasks.run.dependsOn(project.tasks.bundlePlugin)
if (isModule) {
project.tasks.run.clusterConfig.distribution = System.getProperty(
@ -108,9 +127,8 @@ class PluginBuildPlugin implements Plugin<Project> {
} else {
project.tasks.run.clusterConfig.plugin(project.path)
}
if (isModule == false || isXPackModule) {
addNoticeGeneration(project, extension)
addNoticeGeneration(project, extension1)
}
}
project.tasks.named('testingConventions').configure {
@ -150,7 +168,6 @@ class PluginBuildPlugin implements Plugin<Project> {
if (project.plugins.hasPlugin(MavenPublishPlugin)) {
project.publishing.publications.nebula(MavenPublication).artifactId(extension.name)
}
}
}
@ -207,36 +224,6 @@ class PluginBuildPlugin implements Plugin<Project> {
into("${project.buildDir}/generated-resources")
}
project.afterEvaluate {
// check require properties are set
if (extension.name == null) {
throw new InvalidUserDataException('name is a required setting for esplugin')
}
if (extension.description == null) {
throw new InvalidUserDataException('description is a required setting for esplugin')
}
if (extension.classname == null) {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
Map<String, String> properties = [
'name': extension.name,
'description': extension.description,
'version': extension.version,
'elasticsearchVersion': Version.fromString(VersionProperties.elasticsearch).toString(),
'javaVersion': project.targetCompatibility as String,
'classname': extension.classname,
'extendedPlugins': extension.extendedPlugins.join(','),
'hasNativeController': extension.hasNativeController,
'requiresKeystore': extension.requiresKeystore
]
buildProperties.configure {
expand(properties)
inputs.properties(properties)
}
}
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
@ -291,7 +278,7 @@ class PluginBuildPlugin implements Plugin<Project> {
/** Configure the pom for the main jar of this plugin */
protected void addNoticeGeneration(Project project, PluginPropertiesExtension extension) {
protected static void addNoticeGeneration(Project project, PluginPropertiesExtension extension) {
File licenseFile = extension.licenseFile
if (licenseFile != null) {
project.tasks.bundlePlugin.from(licenseFile.parentFile) {

View File

@ -26,20 +26,20 @@ import org.gradle.api.GradleException
import org.gradle.api.Task
import org.gradle.api.tasks.Exec
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.Internal
/**
* A fixture for integration tests which runs in a separate process launched by Ant.
*/
public class AntFixture extends AntTask implements Fixture {
class AntFixture extends AntTask implements Fixture {
/** The path to the executable that starts the fixture. */
@Input
@Internal
String executable
private final List<Object> arguments = new ArrayList<>()
@Input
public void args(Object... args) {
void args(Object... args) {
arguments.addAll(args)
}
@ -49,16 +49,15 @@ public class AntFixture extends AntTask implements Fixture {
*/
private final Map<String, Object> environment = new HashMap<>()
@Input
public void env(String key, Object value) {
void env(String key, Object value) {
environment.put(key, value)
}
/** A flag to indicate whether the command should be executed from a shell. */
@Input
@Internal
boolean useShell = false
@Input
@Internal
int maxWaitInSeconds = 30
/**
@ -72,6 +71,7 @@ public class AntFixture extends AntTask implements Fixture {
* as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
* condition is for http on the http port.
*/
@Internal
Closure waitCondition = { AntFixture fixture, AntBuilder ant ->
File tmpFile = new File(fixture.cwd, 'wait.success')
ant.get(src: "http://${fixture.addressAndPort}",
@ -83,13 +83,14 @@ public class AntFixture extends AntTask implements Fixture {
private final Task stopTask
public AntFixture() {
AntFixture() {
stopTask = createStopTask()
finalizedBy(stopTask)
}
@Override
public Task getStopTask() {
@Internal
Task getStopTask() {
return stopTask
}
@ -168,6 +169,7 @@ public class AntFixture extends AntTask implements Fixture {
}
/** Returns a debug string used to log information about how the fixture was run. */
@Internal
protected String getCommandString() {
String commandString = "\n${name} configuration:\n"
commandString += "-----------------------------------------\n"
@ -247,46 +249,55 @@ public class AntFixture extends AntTask implements Fixture {
* A path relative to the build dir that all configuration and runtime files
* will live in for this fixture
*/
@Internal
protected File getBaseDir() {
return new File(project.buildDir, "fixtures/${name}")
}
/** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */
@Internal
protected File getCwd() {
return new File(baseDir, 'cwd')
}
/** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */
@Internal
protected File getPidFile() {
return new File(baseDir, 'pid')
}
/** Reads the pid file and returns the process' pid */
public int getPid() {
@Internal
int getPid() {
return Integer.parseInt(pidFile.getText('UTF-8').trim())
}
/** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */
@Internal
protected File getPortsFile() {
return new File(baseDir, 'ports')
}
/** Returns an address and port suitable for a uri to connect to this node over http */
public String getAddressAndPort() {
@Internal
String getAddressAndPort() {
return portsFile.readLines("UTF-8").get(0)
}
/** Returns a file that wraps around the actual command when {@code spawn == true}. */
@Internal
protected File getWrapperScript() {
return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run')
}
/** Returns a file that the wrapper script writes when the command failed. */
@Internal
protected File getFailureMarker() {
return new File(cwd, 'run.failed')
}
/** Returns a file that the wrapper script writes when the command failed. */
@Internal
protected File getRunLog() {
return new File(cwd, 'run.log')
}

View File

@ -2,14 +2,16 @@ package org.elasticsearch.gradle.test
import org.gradle.api.DefaultTask
import org.gradle.api.Task
import org.gradle.api.tasks.Internal
import org.gradle.api.tasks.options.Option
import org.gradle.util.ConfigureUtil
public class RunTask extends DefaultTask {
class RunTask extends DefaultTask {
@Internal
ClusterConfiguration clusterConfig
public RunTask() {
RunTask() {
description = "Runs elasticsearch with '${project.path}'"
group = 'Verification'
clusterConfig = new ClusterConfiguration(project)
@ -26,13 +28,13 @@ public class RunTask extends DefaultTask {
option = "debug-jvm",
description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch."
)
public void setDebug(boolean enabled) {
void setDebug(boolean enabled) {
clusterConfig.debug = enabled;
}
/** Configure the cluster that will be run. */
@Override
public Task configure(Closure closure) {
Task configure(Closure closure) {
ConfigureUtil.configure(closure, clusterConfig)
return this
}

View File

@ -16,7 +16,7 @@ public abstract class AbstractLazyPropertyCollection {
this.owner = owner;
}
abstract List<? extends Object> getNormalizedCollection();
public abstract List<? extends Object> getNormalizedCollection();
void assertNotNull(Object value, String description) {
if (value == null) {

View File

@ -202,17 +202,22 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
}
String extension = distribution.getType().toString();
String classifier = "x86_64";
if (distribution.getVersion().before("7.0.0")) {
classifier = null; // no platform specific distros before 7.0
} else if (distribution.getType() == Type.ARCHIVE) {
String classifier = ":x86_64";
if (distribution.getType() == Type.ARCHIVE) {
extension = distribution.getPlatform() == Platform.WINDOWS ? "zip" : "tar.gz";
classifier = distribution.getPlatform() + "-" + classifier;
if (distribution.getVersion().onOrAfter("7.0.0")) {
classifier = ":" + distribution.getPlatform() + "-x86_64";
} else {
classifier = "";
}
} else if (distribution.getType() == Type.DEB) {
classifier = "amd64";
classifier = ":amd64";
}
return FAKE_IVY_GROUP + ":elasticsearch" + (distribution.getFlavor() == Flavor.OSS ? "-oss:" : ":")
+ distribution.getVersion() + (classifier == null ? "" : ":" + classifier) + "@" + extension;
String flavor = "";
if (distribution.getFlavor() == Flavor.OSS && distribution.getVersion().onOrAfter("6.3.0")) {
flavor = "-oss";
}
return FAKE_IVY_GROUP + ":elasticsearch" + flavor + ":" + distribution.getVersion() + classifier + "@" + extension;
}
private static Dependency projectDependency(Project project, String projectPath, String projectConfig) {
@ -246,8 +251,12 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
projectName += "no-jdk-";
}
if (distribution.getType() == Type.ARCHIVE) {
Platform platform = distribution.getPlatform();
projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
if (distribution.getVersion().onOrAfter("7.0.0")) {
Platform platform = distribution.getPlatform();
projectName += platform.toString() + (platform == Platform.WINDOWS ? "-zip" : "-tar");
} else {
projectName = distribution.getFlavor().equals(Flavor.DEFAULT) ?"zip" : "oss-zip";
}
} else {
projectName += distribution.getType();
}

View File

@ -161,7 +161,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
return bundledJdk.getOrElse(true);
}
public void setBundledJdk(boolean bundledJdk) {
public void setBundledJdk(Boolean bundledJdk) {
this.bundledJdk.set(bundledJdk);
}
@ -197,15 +197,15 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
void finalizeValues() {
if (getType() == Type.INTEG_TEST_ZIP) {
if (platform.isPresent()) {
if (platform.getOrNull() != null) {
throw new IllegalArgumentException(
"platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}
if (flavor.isPresent()) {
if (flavor.getOrNull() != null) {
throw new IllegalArgumentException(
"flavor not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
"flavor [" + flavor.get() + "] not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}
if (bundledJdk.isPresent()) {
if (bundledJdk.getOrNull() != null) {
throw new IllegalArgumentException(
"bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]");
}

View File

@ -24,6 +24,7 @@ import javax.inject.Inject;
import org.gradle.api.DefaultTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.TaskAction;
import org.gradle.internal.file.Chmod;
@ -49,11 +50,16 @@ public class EmptyDirTask extends DefaultTask {
throw new UnsupportedOperationException();
}
@Input
@Internal
public File getDir() {
return dir;
}
@Input
public String getDirPath() {
return dir.getPath();
}
/**
* @param dir The directory to create
*/

View File

@ -26,7 +26,6 @@ import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.StopExecutionException;
import org.gradle.api.tasks.TaskAction;
@ -66,7 +65,6 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
}
@Input
@SkipWhenEmpty
public Set<String> getResources() {
return Collections.unmodifiableSet(resources);
}
@ -78,8 +76,8 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
return System.getProperty("java.class.path");
}
public void setOutputDir(DirectoryProperty outputDir) {
this.outputDir = outputDir;
public void setOutputDir(File outputDir) {
this.outputDir.set(outputDir);
}
public File copy(String resource) {
@ -95,6 +93,7 @@ public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
@TaskAction
public void doExport() {
if (resources.isEmpty()) {
setDidWork(false);
throw new StopExecutionException();
}
resources.stream().parallel()

View File

@ -1,7 +1,6 @@
package org.elasticsearch.gradle;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.ArrayList;
import java.util.Collection;
@ -171,8 +170,7 @@ public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implemen
}
@Override
@Nested
List<? extends Object> getNormalizedCollection() {
public List<? extends Object> getNormalizedCollection() {
return delegate.stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)

View File

@ -2,7 +2,6 @@ package org.elasticsearch.gradle;
import org.gradle.api.Named;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Nested;
import java.util.Collection;
import java.util.LinkedHashMap;
@ -117,8 +116,7 @@ public class LazyPropertyMap<K, V> extends AbstractLazyPropertyCollection implem
}
@Override
@Nested
List<? extends Object> getNormalizedCollection() {
public List<? extends Object> getNormalizedCollection() {
return delegate.values().stream()
.peek(this::validate)
.filter(entry -> entry.getNormalization() != PropertyNormalization.IGNORE_VALUE)

View File

@ -28,6 +28,7 @@ import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputDirectory;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.TaskAction;
@ -306,6 +307,7 @@ public class DependencyLicensesTask extends DefaultTask {
return new File(licensesDir, jarName + SHA_EXTENSION);
}
@Internal
Set<File> getShaFiles() {
File[] array = licensesDir.listFiles();
if (array == null) {

View File

@ -87,7 +87,7 @@ public class ForbiddenPatternsTask extends DefaultTask {
@InputFiles
@SkipWhenEmpty
public FileCollection files() {
public FileCollection getFiles() {
return getProject().getConvention().getPlugin(JavaPluginConvention.class).getSourceSets()
.stream()
.map(sourceSet -> sourceSet.getAllSource().matching(filesFilter))
@ -99,7 +99,7 @@ public class ForbiddenPatternsTask extends DefaultTask {
public void checkInvalidPatterns() throws IOException {
Pattern allPatterns = Pattern.compile("(" + String.join(")|(", getPatterns().values()) + ")");
List<String> failures = new ArrayList<>();
for (File f : files()) {
for (File f : getFiles()) {
List<String> lines;
try(Stream<String> stream = Files.lines(f.toPath(), StandardCharsets.UTF_8)) {
lines = stream.collect(Collectors.toList());

View File

@ -22,6 +22,7 @@ package org.elasticsearch.gradle.precommit;
import org.gradle.api.DefaultTask;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.TaskProvider;
@ -77,6 +78,7 @@ public class UpdateShasTask extends DefaultTask {
Files.write(shaFile.toPath(), sha.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
}
@Internal
public DependencyLicensesTask getParentTask() {
return parentTask.get();
}

View File

@ -27,8 +27,8 @@ import org.gradle.api.GradleException;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskAction;
@ -69,13 +69,17 @@ public class SymbolicLinkPreservingUntarTask extends DefaultTask {
private Function<String, Path> transform;
@Internal
public Function<String, Path> getTransform() {
return transform;
}
/**
* A transform to apply to the tar entry, to derive the relative path from the entry name. If the return value is null, the entry is
* dropped from the exploded tar archive.
*
* @param transform the transform
*/
@Input
public void setTransform(Function<String, Path> transform) {
this.transform = transform;
}

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.ReaperService;
@ -28,6 +27,7 @@ import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
import java.io.File;
@ -59,24 +59,23 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
private final String clusterName;
private final NamedDomainObjectContainer<ElasticsearchNode> nodes;
private final File workingDirBase;
private final Function<Integer, ElasticsearchDistribution> distributionFactory;
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final Project project;
private final ReaperService reaper;
private int nodeIndex = 0;
public ElasticsearchCluster(String path, String clusterName, Project project, ReaperService reaper,
Function<Integer, ElasticsearchDistribution> distributionFactory, File workingDirBase) {
public ElasticsearchCluster(String path, String clusterName, Project project,
ReaperService reaper, File workingDirBase) {
this.path = path;
this.clusterName = clusterName;
this.project = project;
this.reaper = reaper;
this.distributionFactory = distributionFactory;
this.workingDirBase = workingDirBase;
this.nodes = project.container(ElasticsearchNode.class);
this.nodes.add(
new ElasticsearchNode(
path, clusterName + "-0",
project, reaper, workingDirBase, distributionFactory.apply(0)
project, reaper, workingDirBase
)
);
// configure the cluster name eagerly so nodes know about it
@ -100,7 +99,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
for (int i = nodes.size() ; i < numberOfNodes; i++) {
this.nodes.add(new ElasticsearchNode(
path, clusterName + "-" + i, project, reaper, workingDirBase, distributionFactory.apply(i)
path, clusterName + "-" + i, project, reaper, workingDirBase
));
}
}
@ -109,14 +108,17 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
return nodes.getAt(clusterName + "-0");
}
@Internal
public int getNumberOfNodes() {
return nodes.size();
}
@Internal
public String getName() {
return clusterName;
}
@Internal
public String getPath() {
return path;
}
@ -126,6 +128,11 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.all(each -> each.setVersion(version));
}
@Override
public void setVersions(List<String> version) {
nodes.all(each -> each.setVersions(version));
}
@Override
public void setTestDistribution(TestDistribution distribution) {
nodes.all(each -> each.setTestDistribution(distribution));
@ -245,22 +252,70 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
@Override
public void start() {
commonNodeConfig();
nodes
.stream()
.filter(node -> {
if (node.getVersion().onOrAfter("6.5.0")) {
return true;
} else {
// We already started it to set seed nodes
return node.equals(nodes.iterator().next()) == false;
}
})
.forEach(ElasticsearchNode::start);
}
private void commonNodeConfig() {
final String nodeNames;
if (nodes.stream().map(ElasticsearchNode::getName).anyMatch( name -> name == null)) {
if (nodes.stream().map(ElasticsearchNode::getName).anyMatch(name -> name == null)) {
nodeNames = null;
} else {
nodeNames = nodes.stream().map(ElasticsearchNode::getName).collect(Collectors.joining(","));
};
nodeNames = nodes.stream().map(ElasticsearchNode::getName).map(this::safeName).collect(Collectors.joining(","));
}
ElasticsearchNode firstNode = null;
for (ElasticsearchNode node : nodes) {
// Can only configure master nodes if we have node names defined
if (nodeNames != null) {
// Can only configure master nodes if we have node names defined
if (node.getVersion().getMajor() >= 7) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
node.defaultConfig.put("discovery.seed_providers", "file");
node.defaultConfig.put("discovery.seed_hosts", "[]");
commonNodeConfig(node, nodeNames, firstNode);
}
if (firstNode == null) {
firstNode = node;
if (node.getVersion().before("6.5.0")) {
// We need to start the first node early to be able to provide unicast.hosts
firstNode.start();
}
}
}
}
private void commonNodeConfig(ElasticsearchNode node, String nodeNames, ElasticsearchNode firstNode) {
if (node.getVersion().onOrAfter("7.0.0")) {
node.defaultConfig.keySet().stream()
.filter(name -> name.startsWith("discovery.zen."))
.collect(Collectors.toList())
.forEach(node.defaultConfig::remove);
if (nodeNames != null) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
}
node.defaultConfig.put("discovery.seed_providers", "file");
node.defaultConfig.put("discovery.seed_hosts", "[]");
} else {
node.defaultConfig.put("discovery.zen.master_election.wait_for_joins_timeout", "5s");
if (nodes.size() > 1) {
node.defaultConfig.put("discovery.zen.minimum_master_nodes", Integer.toString(nodes.size() / 2 + 1));
}
if (node.getVersion().onOrAfter("6.5.0")) {
node.defaultConfig.put("discovery.zen.hosts_provider", "file");
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[]");
} else {
if (firstNode == null) {
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[]");
} else {
firstNode.waitForAllConditions();
node.defaultConfig.put("discovery.zen.ping.unicast.hosts", "[\"" + firstNode.getTransportPortURI() + "\"]");
}
}
node.start();
}
}
@ -269,6 +324,42 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.forEach(ElasticsearchNode::restart);
}
public void goToNextVersion() {
stop(false);
nodes.all(ElasticsearchNode::goToNextVersion);
start();
writeUnicastHostsFiles();
}
public void nextNodeToNextVersion() {
if (nodeIndex + 1 > nodes.size()) {
throw new TestClustersException("Ran out of nodes to take to the next version");
}
ElasticsearchNode node = nodes.getByName(clusterName + "-" + nodeIndex);
node.stop(false);
node.goToNextVersion();
commonNodeConfig(node, null, null);
// We need to translate these settings there as there's no support to do per version config for testclusters yet
if (node.getVersion().onOrAfter("7.0.0")) {
if (node.settings.containsKey("xpack.security.authc.realms.file1.type")) {
node.settings.remove("xpack.security.authc.realms.file1.type");
node.settings.put(
"xpack.security.authc.realms.file.file1.order",
node.settings.remove("xpack.security.authc.realms.file1.order")
);
}
if (node.settings.containsKey("xpack.security.authc.realms.native1.type")) {
node.settings.remove("xpack.security.authc.realms.native1.type");
node.settings.put(
"xpack.security.authc.realms.native.native1.order",
node.settings.remove("xpack.security.authc.realms.native1.order")
);
}
}
nodeIndex += 1;
node.start();
}
@Override
public void extraConfigFile(String destination, File from) {
nodes.all(node -> node.extraConfigFile(destination, from));
@ -296,33 +387,34 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
}
@Override
@Internal
public String getHttpSocketURI() {
waitForAllConditions();
return getFirstNode().getHttpSocketURI();
}
@Override
@Internal
public String getTransportPortURI() {
waitForAllConditions();
return getFirstNode().getTransportPortURI();
}
@Override
@Internal
public List<String> getAllHttpSocketURI() {
waitForAllConditions();
return nodes.stream().flatMap(each -> each.getAllHttpSocketURI().stream()).collect(Collectors.toList());
}
@Override
@Internal
public List<String> getAllTransportPortURI() {
waitForAllConditions();
return nodes.stream().flatMap(each -> each.getAllTransportPortURI().stream()).collect(Collectors.toList());
}
public void waitForAllConditions() {
LOGGER.info("Waiting for nodes");
nodes.forEach(ElasticsearchNode::waitForAllConditions);
writeUnicastHostsFiles();
LOGGER.info("Starting to wait for cluster to form");
@ -340,6 +432,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
}
@Override
@Internal
public boolean isProcessAlive() {
return nodes.stream().noneMatch(node -> node.isProcessAlive() == false);
}
@ -363,7 +456,6 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named {
nodes.size()
);
if (httpSslEnabled) {
getFirstNode().configureHttpWait(wait);
}
List<Map<String, String>> credentials = getFirstNode().getCredentials();

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.LazyPropertyList;
@ -31,8 +32,8 @@ import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.http.WaitForHttpResource;
import org.gradle.api.Action;
import org.gradle.api.Named;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
import org.gradle.api.tasks.Classpath;
@ -41,6 +42,7 @@ import org.gradle.api.tasks.InputFile;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.util.PatternFilterable;
@ -71,6 +73,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@ -118,7 +121,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>();
private final List<URI> plugins = new ArrayList<>();
private final List<File> modules = new ArrayList<>();
private final LazyPropertyMap<String, CharSequence> settings = new LazyPropertyMap<>("Settings", this);
final LazyPropertyMap<String, CharSequence> settings = new LazyPropertyMap<>("Settings", this);
private final LazyPropertyMap<String, CharSequence> keystoreSettings = new LazyPropertyMap<>("Keystore", this);
private final LazyPropertyMap<String, File> keystoreFiles = new LazyPropertyMap<>("Keystore files", this, FileEntry::new);
private final LazyPropertyMap<String, CharSequence> systemProperties = new LazyPropertyMap<>("System properties", this);
@ -137,23 +140,23 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private final Path esStdoutFile;
private final Path esStderrFile;
private final Path tmpDir;
private final Path distroDir;
private String version;
private int currentDistro = 0;
private TestDistribution testDistribution;
private ElasticsearchDistribution distribution;
private List<ElasticsearchDistribution> distributions = new ArrayList<>();
private File javaHome;
private volatile Process esProcess;
private Function<String, String> nameCustomization = Function.identity();
private boolean isWorkingDirConfigured = false;
ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase,
ElasticsearchDistribution distribution) {
ElasticsearchNode(String path, String name, Project project, ReaperService reaper, File workingDirBase) {
this.path = path;
this.name = name;
this.project = project;
this.reaper = reaper;
this.workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
this.distribution = distribution;
workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath();
distroDir = workingDir.resolve("distro");
confPathRepo = workingDir.resolve("repo");
configFile = workingDir.resolve("config/elasticsearch.yml");
confPathData = workingDir.resolve("data");
@ -169,21 +172,44 @@ public class ElasticsearchNode implements TestClusterConfiguration {
setVersion(VersionProperties.getElasticsearch());
}
@Input
@Optional
public String getName() {
return nameCustomization.apply(name);
}
@Internal
public Version getVersion() {
return distribution.getVersion();
return distributions.get(currentDistro).getVersion();
}
@Override
public void setVersion(String version) {
requireNonNull(version, "null version passed when configuring test cluster `" + this + "`");
checkFrozen();
this.version = version;
this.distribution.setVersion(version);
distributions.clear();
doSetVersion(version);
}
@Override
public void setVersions(List<String> versions) {
requireNonNull(versions, "null version list passed when configuring test cluster `" + this + "`");
distributions.clear();
for (String version : versions) {
doSetVersion(version);
}
}
private void doSetVersion(String version) {
String distroName = "testclusters" + path.replace(":", "-") + "-" + this.name + "-" + version + "-";
NamedDomainObjectContainer<ElasticsearchDistribution> container = DistributionDownloadPlugin.getContainer(project);
if (container.findByName(distroName) == null) {
container.create(distroName);
}
ElasticsearchDistribution distro = container.getByName(distroName);
distro.setVersion(version);
setDistributionType(distro, testDistribution);
distributions.add(distro);
}
@Internal
@ -193,8 +219,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
// package private just so test clusters plugin can access to wire up task dependencies
@Internal
ElasticsearchDistribution getDistribution() {
return distribution;
List<ElasticsearchDistribution> getDistributions() {
return distributions;
}
@Override
@ -202,14 +228,24 @@ public class ElasticsearchNode implements TestClusterConfiguration {
requireNonNull(testDistribution, "null distribution passed when configuring test cluster `" + this + "`");
checkFrozen();
this.testDistribution = testDistribution;
for (ElasticsearchDistribution distribution : distributions) {
setDistributionType(distribution, testDistribution);
}
}
private void setDistributionType(ElasticsearchDistribution distribution, TestDistribution testDistribution) {
if (testDistribution == TestDistribution.INTEG_TEST) {
this.distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
distribution.setType(ElasticsearchDistribution.Type.INTEG_TEST_ZIP);
// we change the underlying distribution when changing the test distribution of the cluster.
distribution.setFlavor(null);
distribution.setPlatform(null);
distribution.setBundledJdk(null);
} else {
this.distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
distribution.setType(ElasticsearchDistribution.Type.ARCHIVE);
if (testDistribution == TestDistribution.DEFAULT) {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
distribution.setFlavor(ElasticsearchDistribution.Flavor.DEFAULT);
} else {
this.distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
distribution.setFlavor(ElasticsearchDistribution.Flavor.OSS);
}
}
}
@ -313,14 +349,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
jvmArgs.addAll(Arrays.asList(values));
}
@Internal
public Path getConfigDir() {
return configFile.getParent();
}
@Override
public void freeze() {
requireNonNull(distribution, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(getVersion(), "null version passed when configuring test cluster `" + this + "`");
requireNonNull(distributions, "null distribution passed when configuring test cluster `" + this + "`");
requireNonNull(javaHome, "null javaHome passed when configuring test cluster `" + this + "`");
LOGGER.info("Locking configuration of `{}`", this);
configurationFrozen.set(true);
@ -336,6 +372,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
this.javaHome = javaHome;
}
@Internal
public File getJavaHome() {
return javaHome;
}
@ -363,10 +400,13 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try {
if (isWorkingDirConfigured == false) {
logToProcessStdout("Configuring working directory: " + workingDir);
// Only configure working dir once so we don't lose data on restarts
// make sure we always start fresh
if (Files.exists(workingDir)) {
project.delete(workingDir);
}
isWorkingDirConfigured = true;
createWorkingDir(getExtractedDistributionDir());
}
createWorkingDir(getExtractedDistributionDir());
} catch (IOException e) {
throw new UncheckedIOException("Failed to create working directory for " + this, e);
}
@ -380,6 +420,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
);
}
if (getVersion().before("6.3.0") && testDistribution == TestDistribution.DEFAULT) {
LOGGER.info("emulating the {} flavor for {} by installing x-pack", testDistribution, getVersion());
runElaticsearchBinScript(
"elasticsearch-plugin",
"install", "--batch", "x-pack"
);
}
if (keystoreSettings.isEmpty() == false || keystoreFiles.isEmpty() == false) {
logToProcessStdout("Adding " + keystoreSettings.size() + " keystore settings and " + keystoreFiles.size() + " keystore files");
runElaticsearchBinScript("elasticsearch-keystore", "create");
@ -402,13 +450,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
copyExtraConfigFiles();
if (isSettingMissingOrTrue("xpack.security.enabled")) {
logToProcessStdout("Setting up " + credentials.size() + " users");
if (isSettingTrue("xpack.security.enabled")) {
if (credentials.isEmpty()) {
user(Collections.emptyMap());
}
}
if (credentials.isEmpty() == false) {
logToProcessStdout("Setting up " + credentials.size() + " users");
credentials.forEach(paramMap -> runElaticsearchBinScript(
"elasticsearch-users",
getVersion().onOrAfter("6.3.0") ? "elasticsearch-users" : "x-pack/users",
paramMap.entrySet().stream()
.flatMap(entry -> Stream.of(entry.getKey(), entry.getValue()))
.toArray(String[]::new)
@ -438,17 +490,19 @@ public class ElasticsearchNode implements TestClusterConfiguration {
public void restart() {
LOGGER.info("Restarting {}", this);
stop(false);
try {
Files.delete(httpPortsFile);
Files.delete(transportPortFile);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
start();
}
private boolean isSettingMissingOrTrue(String name) {
void goToNextVersion() {
if (currentDistro + 1 >= distributions.size()) {
throw new TestClustersException("Ran out of versions to go to for " + this);
}
logToProcessStdout("Switch version from " + getVersion() + " to " + distributions.get(currentDistro + 1).getVersion());
currentDistro += 1;
setting("node.attr.upgraded", "true");
}
private boolean isSettingTrue(String name) {
return Boolean.valueOf(settings.getOrDefault(name, "false").toString());
}
@ -476,8 +530,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
if (testDistribution == TestDistribution.INTEG_TEST) {
logToProcessStdout("Installing " + modules.size() + "modules");
for (File module : modules) {
Path destination = workingDir.resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + version, ""));
Path destination = distroDir.resolve("modules").resolve(module.getName().replace(".zip", "")
.replace("-" + getVersion(), "")
.replace("-SNAPSHOT", ""));
// only install modules that are not already bundled with the integ-test distribution
if (Files.exists(destination) == false) {
@ -494,7 +549,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
}
} else {
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distribution + " distribution already " +
LOGGER.info("Not installing " + modules.size() + "(s) since the " + distributions + " distribution already " +
"has them");
}
}
@ -535,8 +590,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
private void runElaticsearchBinScriptWithInput(String input, String tool, String... args) {
if (
Files.exists(workingDir.resolve("bin").resolve(tool)) == false &&
Files.exists(workingDir.resolve("bin").resolve(tool + ".bat")) == false
Files.exists(distroDir.resolve("bin").resolve(tool)) == false &&
Files.exists(distroDir.resolve("bin").resolve(tool + ".bat")) == false
) {
throw new TestClustersException("Can't run bin script: `" + tool + "` does not exist. " +
"Is this the distribution you expect it to be ?");
@ -544,7 +599,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try (InputStream byteArrayInputStream = new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8))) {
LoggedExec.exec(project, spec -> {
spec.setEnvironment(getESEnvironment());
spec.workingDir(workingDir);
spec.workingDir(distroDir);
spec.executable(
OS.conditionalString()
.onUnix(() -> "./bin/" + tool)
@ -629,8 +684,8 @@ public class ElasticsearchNode implements TestClusterConfiguration {
final ProcessBuilder processBuilder = new ProcessBuilder();
List<String> command = OS.<List<String>>conditional()
.onUnix(() -> Arrays.asList("./bin/elasticsearch"))
.onWindows(() -> Arrays.asList("cmd", "/c", "bin\\elasticsearch.bat"))
.onUnix(() -> Arrays.asList(distroDir.getFileName().resolve("./bin/elasticsearch").toString()))
.onWindows(() -> Arrays.asList("cmd", "/c", distroDir.getFileName().resolve("bin\\elasticsearch.bat").toString()))
.supply();
processBuilder.command(command);
processBuilder.directory(workingDir.toFile());
@ -651,35 +706,54 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
@Override
@Internal
public String getHttpSocketURI() {
return getHttpPortInternal().get(0);
}
@Override
@Internal
public String getTransportPortURI() {
return getTransportPortInternal().get(0);
}
@Override
@Internal
public List<String> getAllHttpSocketURI() {
waitForAllConditions();
return getHttpPortInternal();
}
@Override
@Internal
public List<String> getAllTransportPortURI() {
waitForAllConditions();
return getTransportPortInternal();
}
@Internal
public File getServerLog() {
return confPathLogs.resolve(defaultConfig.get("cluster.name") + "_server.json").toFile();
}
@Internal
public File getAuditLog() {
return confPathLogs.resolve(defaultConfig.get("cluster.name") + "_audit.json").toFile();
}
@Override
public synchronized void stop(boolean tailLogs) {
logToProcessStdout("Stopping node");
try {
if (Files.exists(httpPortsFile)) {
Files.delete(httpPortsFile);
}
if (Files.exists(transportPortFile)) {
Files.delete(transportPortFile);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
if (esProcess == null && tailLogs) {
// This is a special case. If start() throws an exception the plugin will still call stop
// Another exception here would eat the orriginal.
@ -694,6 +768,17 @@ public class ElasticsearchNode implements TestClusterConfiguration {
logFileContents("Standard error of node", esStderrFile);
}
esProcess = null;
// Clean up the ports file in case this is started again.
try {
if (Files.exists(httpPortsFile)) {
Files.delete(httpPortsFile);
}
if (Files.exists(transportPortFile)) {
Files.delete(transportPortFile);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@Override
@ -830,7 +915,9 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private void createWorkingDir(Path distroExtractDir) throws IOException {
syncWithLinks(distroExtractDir, workingDir);
syncWithLinks(distroExtractDir, distroDir);
// Start configuration from scratch in case of a restart
project.delete(configFile.getParent());
Files.createDirectories(configFile.getParent());
Files.createDirectories(confPathRepo);
Files.createDirectories(confPathData);
@ -853,7 +940,14 @@ public class ElasticsearchNode implements TestClusterConfiguration {
try (Stream<Path> stream = Files.walk(sourceRoot)) {
stream.forEach(source -> {
Path destination = destinationRoot.resolve(sourceRoot.relativize(source));
Path relativeDestination = sourceRoot.relativize(source);
if (relativeDestination.getNameCount() <= 1) {
return;
}
// Throw away the first name as the archives have everything in a single top level folder we are not interested in
relativeDestination = relativeDestination.subpath(1, relativeDestination.getNameCount());
Path destination = destinationRoot.resolve(relativeDestination);
if (Files.isDirectory(source)) {
try {
Files.createDirectories(destination);
@ -933,9 +1027,6 @@ public class ElasticsearchNode implements TestClusterConfiguration {
.forEach(defaultConfig::remove);
try {
// We create hard links for the distribution, so we need to remove the config file before writing it
// to prevent the changes to reflect across all copies.
Files.delete(configFile);
Files.write(
configFile,
Stream.concat(
@ -944,8 +1035,21 @@ public class ElasticsearchNode implements TestClusterConfiguration {
)
.map(entry -> entry.getKey() + ": " + entry.getValue())
.collect(Collectors.joining("\n"))
.getBytes(StandardCharsets.UTF_8)
.getBytes(StandardCharsets.UTF_8),
StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE
);
final List<Path> configFiles;
try (Stream<Path> stream = Files.list(distroDir.resolve("config"))) {
configFiles = stream.collect(Collectors.toList());
}
logToProcessStdout("Copying additional config files from distro " + configFiles);
for (Path file : configFiles) {
Path dest = configFile.getParent().resolve(file.getFileName());
if (Files.exists(dest) == false) {
Files.copy(file, dest);
}
}
} catch (IOException e) {
throw new UncheckedIOException("Could not write config file: " + configFile, e);
}
@ -985,7 +1089,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
private Path getExtractedDistributionDir() {
return Paths.get(distribution.getExtracted().toString()).resolve("elasticsearch-" + version);
return Paths.get(distributions.get(currentDistro).getExtracted().toString());
}
private List<File> getInstalledFileSet(Action<? super PatternFilterable> filter) {
@ -1003,74 +1107,82 @@ public class ElasticsearchNode implements TestClusterConfiguration {
}
@Input
private Set<URI> getRemotePlugins() {
public Set<URI> getRemotePlugins() {
Set<URI> file = plugins.stream().filter(uri -> uri.getScheme().equalsIgnoreCase("file") == false).collect(Collectors.toSet());
return file;
}
@Classpath
private List<File> getInstalledClasspath() {
public List<File> getInstalledClasspath() {
return getInstalledFileSet(filter -> filter.include("**/*.jar"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private List<File> getInstalledFiles() {
public List<File> getInstalledFiles() {
return getInstalledFileSet(filter -> filter.exclude("**/*.jar"));
}
@Classpath
private List<File> getDistributionClasspath() {
ArrayList<File> files = new ArrayList<>(project.fileTree(getExtractedDistributionDir())
.matching(filter -> filter.include("**/*.jar"))
.getFiles());
files.sort(Comparator.comparing(File::getName));
return files;
public Set<File> getDistributionClasspath() {
return getDistributionFiles(filter -> filter.include("**/*.jar"));
}
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
private FileCollection getDistributionFiles() {
return project.fileTree(getExtractedDistributionDir()).minus(project.files(getDistributionClasspath()));
public Set<File> getDistributionFiles() {
return getDistributionFiles(filter -> filter.exclude("**/*.jar"));
}
private Set<File> getDistributionFiles(Action<PatternFilterable> patternFilter) {
Set<File> files = new TreeSet<>();
for (ElasticsearchDistribution distribution : distributions) {
files.addAll(
project.fileTree(Paths.get(distribution.getExtracted().toString()))
.matching(patternFilter)
.getFiles()
);
}
return files;
}
@Nested
private Map<String, CharSequence> getKeystoreSettings() {
return keystoreSettings;
public List<?> getKeystoreSettings() {
return keystoreSettings.getNormalizedCollection();
}
@Nested
private Map<String, File> getKeystoreFiles() {
return keystoreFiles;
public List<?> getKeystoreFiles() {
return keystoreFiles.getNormalizedCollection();
}
@Nested
private Map<String, CharSequence> getSettings() {
return settings;
public List<?> getSettings() {
return settings.getNormalizedCollection();
}
@Nested
private Map<String, CharSequence> getSystemProperties() {
return systemProperties;
public List<?> getSystemProperties() {
return systemProperties.getNormalizedCollection();
}
@Nested
private Map<String, CharSequence> getEnvironment() {
return environment;
public List<?> getEnvironment() {
return environment.getNormalizedCollection();
}
@Nested
private List<CharSequence> getJvmArgs() {
return jvmArgs;
public List<?> getJvmArgs() {
return jvmArgs.getNormalizedCollection();
}
@Nested
private Map<String, File> getExtraConfigFiles() {
return extraConfigFiles;
public List<?> getExtraConfigFiles() {
return extraConfigFiles.getNormalizedCollection();
}
@Override
@Internal
public boolean isProcessAlive() {
requireNonNull(
esProcess,
@ -1136,6 +1248,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
return Files.exists(httpPortsFile) && Files.exists(transportPortFile);
}
@Internal
public boolean isHttpSslEnabled() {
return Boolean.valueOf(
settings.getOrDefault("xpack.security.http.ssl.enabled", "false").toString()

View File

@ -38,6 +38,8 @@ public interface TestClusterConfiguration {
void setVersion(String version);
void setVersions(List<String> version);
void setTestDistribution(TestDistribution distribution);
void plugin(URI plugin);
@ -165,7 +167,7 @@ public interface TestClusterConfiguration {
default String safeName(String name) {
return name
.replaceAll("^[^a-zA-Z0-9]+", "")
.replaceAll("[^a-zA-Z0-9]+", "-");
.replaceAll("[^a-zA-Z0-9\\.]+", "-");
}
boolean isProcessAlive();

View File

@ -18,9 +18,9 @@ interface TestClustersAware extends Task {
);
}
for (ElasticsearchNode node : cluster.getNodes()) {
this.dependsOn(node.getDistribution().getExtracted());
}
cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach( distro ->
dependsOn(distro.getExtracted())
);
getClusters().add(cluster);
}
}

View File

@ -19,7 +19,6 @@
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.DistributionDownloadPlugin;
import org.elasticsearch.gradle.ElasticsearchDistribution;
import org.elasticsearch.gradle.ReaperPlugin;
import org.elasticsearch.gradle.ReaperService;
import org.gradle.api.NamedDomainObjectContainer;
@ -57,7 +56,7 @@ public class TestClustersPlugin implements Plugin<Project> {
// provide a task to be able to list defined clusters.
createListClustersTask(project, container);
if (project.getRootProject().getExtensions().findByType(TestClustersRegistry.class) == null) {
if (project.getRootProject().getExtensions().findByName("testClusters") == null) {
TestClustersRegistry registry = project.getRootProject().getExtensions()
.create("testClusters", TestClustersRegistry.class);
@ -76,8 +75,6 @@ public class TestClustersPlugin implements Plugin<Project> {
}
private NamedDomainObjectContainer<ElasticsearchCluster> createTestClustersContainerExtension(Project project) {
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
// Create an extensions that allows describing clusters
NamedDomainObjectContainer<ElasticsearchCluster> container = project.container(
ElasticsearchCluster.class,
@ -86,7 +83,6 @@ public class TestClustersPlugin implements Plugin<Project> {
name,
project,
reaper,
i -> distros.create(name + "-" + i),
new File(project.getBuildDir(), "testclusters")
)
);

View File

@ -15,6 +15,8 @@ eclipse.preferences.version=1
# org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
# org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning
# We check this in Gradle. Eclipse fails this check because it doesn't have separate class-paths for
org.eclipse.jdt.core.circularClasspath=warning
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.formatter.comment.line_length=140
org.eclipse.jdt.core.formatter.lineSplit=140

View File

@ -15,6 +15,10 @@ public class VersionProperties {
return elasticsearch;
}
public static Version getElasticsearchVersion() {
return Version.fromString(elasticsearch);
}
public static String getLucene() {
return lucene;
}

View File

@ -1,64 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.doc
import org.elasticsearch.gradle.test.GradleUnitTestCase
import org.gradle.api.InvalidUserDataException
import org.junit.Rule
import org.junit.rules.ExpectedException
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.shouldAddShardFailureCheck
class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {
@Rule
public ExpectedException expectedEx = ExpectedException.none()
void testInvalidBlockQuote() {
String input = "\"foo\": \"\"\"bar\""
expectedEx.expect(InvalidUserDataException.class)
expectedEx.expectMessage("Invalid block quote starting at 7 in:\n$input")
replaceBlockQuote(input)
}
void testSimpleBlockQuote() {
assertEquals("\"foo\": \"bort baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""))
}
void testMultipleBlockQuotes() {
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""))
}
void testEscapingInBlockQuote() {
assertEquals("\"foo\": \"bort\\\" baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""))
assertEquals("\"foo\": \"bort\\n baz\"",
replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""))
}
void testIsDocWriteRequest() {
assertTrue(shouldAddShardFailureCheck("doc-index/_search"));
assertFalse(shouldAddShardFailureCheck("_cat"))
assertFalse(shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview"));
}
}

View File

@ -1,161 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.apache.commons.io.FileUtils;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Ignore("https://github.com/elastic/elasticsearch/issues/42453")
public class BuildExamplePluginsIT extends GradleIntegrationTestCase {
private static final List<File> EXAMPLE_PLUGINS = Collections.unmodifiableList(
Arrays.stream(
Objects.requireNonNull(System.getProperty("test.build-tools.plugin.examples"))
.split(File.pathSeparator)
).map(File::new).collect(Collectors.toList())
);
private static final String BUILD_TOOLS_VERSION = Objects.requireNonNull(System.getProperty("test.version_under_test"));
@Rule
public TemporaryFolder tmpDir = new TemporaryFolder();
public final File examplePlugin;
public BuildExamplePluginsIT(File examplePlugin) {
this.examplePlugin = examplePlugin;
}
@BeforeClass
public static void assertProjectsExist() {
assertEquals(
EXAMPLE_PLUGINS,
EXAMPLE_PLUGINS.stream().filter(File::exists).collect(Collectors.toList())
);
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
return EXAMPLE_PLUGINS
.stream()
.map(each -> new Object[] {each})
.collect(Collectors.toList());
}
public void testCurrentExamplePlugin() throws IOException {
FileUtils.copyDirectory(examplePlugin, tmpDir.getRoot(), pathname -> pathname.getPath().contains("/build/") == false);
adaptBuildScriptForTest();
Files.write(
tmpDir.newFile("NOTICE.txt").toPath(),
"dummy test notice".getBytes(StandardCharsets.UTF_8)
);
GradleRunner.create()
.withProjectDir(tmpDir.getRoot())
.withArguments("clean", "check", "-s", "-i", "--warning-mode=all", "--scan")
.withPluginClasspath()
.build();
}
private void adaptBuildScriptForTest() throws IOException {
// Add the local repo as a build script URL so we can pull in build-tools and apply the plugin under test
// we need to specify the exact version of build-tools because gradle automatically adds its plugin portal
// which appears to mirror jcenter, opening us up to pulling a "later" version of build-tools
writeBuildScript(
"buildscript {\n" +
" repositories {\n" +
" maven {\n" +
" name = \"test\"\n" +
" url = '" + getLocalTestRepoPath() + "'\n" +
" }\n" +
" }\n" +
" dependencies {\n" +
" classpath \"org.elasticsearch.gradle:build-tools:" + BUILD_TOOLS_VERSION + "\"\n" +
" }\n" +
"}\n"
);
// get the original file
Files.readAllLines(getTempPath("build.gradle"), StandardCharsets.UTF_8)
.stream()
.map(line -> line + "\n")
.forEach(this::writeBuildScript);
// Add a repositories section to be able to resolve dependencies
String luceneSnapshotRepo = "";
String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision");
if (luceneSnapshotRepo != null) {
luceneSnapshotRepo = " maven {\n" +
" name \"lucene-snapshots\"\n" +
" url \"https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision + "\"\n" +
" }\n";
}
writeBuildScript("\n" +
"repositories {\n" +
" maven {\n" +
" name \"test\"\n" +
" url \"" + getLocalTestRepoPath() + "\"\n" +
" }\n" +
" flatDir {\n" +
" dir '" + getLocalTestDownloadsPath() + "'\n" +
" }\n" +
luceneSnapshotRepo +
"}\n"
);
Files.delete(getTempPath("build.gradle"));
Files.move(getTempPath("build.gradle.new"), getTempPath("build.gradle"));
System.err.print("Generated build script is:");
Files.readAllLines(getTempPath("build.gradle")).forEach(System.err::println);
}
private Path getTempPath(String fileName) {
return new File(tmpDir.getRoot(), fileName).toPath();
}
private Path writeBuildScript(String script) {
try {
Path path = getTempPath("build.gradle.new");
return Files.write(
path,
script.getBytes(StandardCharsets.UTF_8),
Files.exists(path) ? StandardOpenOption.APPEND : StandardOpenOption.CREATE_NEW
);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}

View File

@ -51,7 +51,7 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
public void testCheckTask() {
BuildResult result = getGradleRunner("elasticsearch.build")
.withArguments("check", "assemble", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
.withArguments("check", "assemble", "-s")
.build();
assertTaskSuccessful(result, ":check");
}
@ -103,7 +103,7 @@ public class BuildPluginIT extends GradleIntegrationTestCase {
public void testLicenseAndNotice() throws IOException {
BuildResult result = getGradleRunner("elasticsearch.build")
.withArguments("clean", "assemble", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
.withArguments("clean", "assemble")
.build();
assertTaskSuccessful(result, ":assemble");

View File

@ -50,12 +50,17 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
public void testBwc() throws Exception {
assertExtractedDistro("1.1.0", "archive", "linux", null, null,
"tests.local_distro.config", "linux-tar",
"tests.local_distro.config", "zip",
"tests.local_distro.project", ":distribution:bwc:minor",
"tests.current_version", "2.0.0");
}
public void testReleased() throws Exception {
doTestReleased("7.0.0", "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip");
doTestReleased("6.5.0", "/downloads/elasticsearch/elasticsearch-6.5.0.zip");
}
private void doTestReleased(String version, String urlPath) throws IOException {
WireMockServer wireMock = new WireMockServer(0);
try {
final byte[] filebytes;
@ -63,12 +68,11 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
Files.newInputStream(Paths.get("src/testKit/distribution-download/distribution/files/fake_elasticsearch.zip"))) {
filebytes = stream.readAllBytes();
}
String urlPath = "/downloads/elasticsearch/elasticsearch-7.0.0-windows-x86_64.zip";
wireMock.stubFor(head(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200)));
wireMock.stubFor(get(urlEqualTo(urlPath)).willReturn(aResponse().withStatus(200).withBody(filebytes)));
wireMock.start();
assertExtractedDistro("7.0.0", "archive", "windows", null, null,
assertExtractedDistro(version, "archive", "windows", null, null,
"tests.download_service", wireMock.baseUrl());
} catch (Exception e) {
// for debugging
@ -99,7 +103,6 @@ public class DistributionDownloadPluginIT extends GradleIntegrationTestCase {
assert sysProps.length % 2 == 0;
List<String> args = new ArrayList<>();
args.add(taskname);
args.add("-Dlocal.repo.path=" + getLocalTestRepoPath());
for (int i = 0; i < sysProps.length; i += 2) {
args.add("-D" + sysProps[i] + "=" + sysProps[i + 1]);
}

View File

@ -90,7 +90,7 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase {
public void testFlavorForIntegTest() {
assertDistroError(createProject(null),
"testdistro", "5.0.0", Type.INTEG_TEST_ZIP, null, Flavor.OSS, null,
"flavor not allowed for elasticsearch distribution [testdistro]");
"flavor [oss] not allowed for elasticsearch distribution [testdistro] of type [integ_test_zip]");
}
public void testBundledJdkDefault() {

View File

@ -109,7 +109,6 @@ public abstract class JdkDownloadPluginIT extends GradleIntegrationTestCase {
GradleRunner runner = GradleRunner.create().withProjectDir(getProjectDir("jdk-download"))
.withArguments(taskname,
"-Dlocal.repo.path=" + getLocalTestRepoPath(),
"-Dtests.jdk_vendor=" + vendor,
"-Dtests.jdk_version=" + version,
"-Dtests.jdk_repo=" + wireMock.baseUrl(),

View File

@ -0,0 +1,40 @@
package org.elasticsearch.gradle.doc;
import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.InvalidUserDataException;
import org.junit.Rule;
import org.junit.rules.ExpectedException;
import static org.elasticsearch.gradle.doc.RestTestsFromSnippetsTask.replaceBlockQuote;
public class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {
@Rule
public ExpectedException expectedEx = ExpectedException.none();
public void testInvalidBlockQuote() {
String input = "\"foo\": \"\"\"bar\"";
expectedEx.expect(InvalidUserDataException.class);
expectedEx.expectMessage("Invalid block quote starting at 7 in:\n" + input);
replaceBlockQuote(input);
}
public void testSimpleBlockQuote() {
assertEquals("\"foo\": \"bort baz\"", replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\""));
}
public void testMultipleBlockQuotes() {
assertEquals("\"foo\": \"bort baz\", \"bar\": \"other\"",
replaceBlockQuote("\"foo\": \"\"\"bort baz\"\"\", \"bar\": \"\"\"other\"\"\""));
}
public void testEscapingInBlockQuote() {
assertEquals("\"foo\": \"bort\\\" baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\" baz\"\"\""));
assertEquals("\"foo\": \"bort\\n baz\"", replaceBlockQuote("\"foo\": \"\"\"bort\n baz\"\"\""));
}
public void testIsDocWriteRequest() {
assertTrue((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("doc-index/_search"));
assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_cat"));
assertFalse((boolean) RestTestsFromSnippetsTask.shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview"));
}
}

View File

@ -0,0 +1,69 @@
package org.elasticsearch.gradle.plugin;
import org.elasticsearch.gradle.BwcVersions;
import org.elasticsearch.gradle.test.GradleUnitTestCase;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.Before;
import org.junit.Ignore;
import org.mockito.Mockito;
import java.util.stream.Collectors;
public class PluginBuildPluginTests extends GradleUnitTestCase {
private Project project;
@Before
public void setUp() throws Exception {
project = ProjectBuilder.builder()
.withName(getClass().getName())
.build();
}
public void testApply() {
// FIXME: distribution download plugin doesn't support running externally
project.getExtensions().getExtraProperties().set(
"bwcVersions", Mockito.mock(BwcVersions.class)
);
project.getPlugins().apply(PluginBuildPlugin.class);
assertNotNull(
"plugin extension created with the right name",
project.getExtensions().findByName(PluginBuildPlugin.PLUGIN_EXTENSION_NAME)
);
assertNotNull(
"plugin extensions has the right type",
project.getExtensions().findByType(PluginPropertiesExtension.class)
);
assertNotNull(
"plugin created an integTest class",
project.getTasks().findByName("integTest")
);
}
@Ignore("https://github.com/elastic/elasticsearch/issues/47123")
public void testApplyWithAfterEvaluate() {
project.getExtensions().getExtraProperties().set(
"bwcVersions", Mockito.mock(BwcVersions.class)
);
project.getPlugins().apply(PluginBuildPlugin.class);
PluginPropertiesExtension extension = project.getExtensions().getByType(PluginPropertiesExtension.class);
extension.setNoticeFile(project.file("test.notice"));
extension.setLicenseFile(project.file("test.license"));
extension.setDescription("just a test");
extension.setClassname(getClass().getName());
((ProjectInternal) project).evaluate();
assertNotNull(
"Task to generate notice not created: " + project.getTasks().stream()
.map(Task::getPath)
.collect(Collectors.joining(", ")),
project.getTasks().findByName("generateNotice")
);
}
}

View File

@ -157,10 +157,6 @@ public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {
);
}
protected String getLocalTestRepoPath() {
return getLocalTestPath("test.local-test-repo-path");
}
protected String getLocalTestDownloadsPath() {
return getLocalTestPath("test.local-test-downloads-path");
}

View File

@ -1,219 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.testclusters;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.Before;
import org.junit.Ignore;
import java.util.Arrays;
@Ignore("https://github.com/elastic/elasticsearch/issues/42453")
public class TestClustersPluginIT extends GradleIntegrationTestCase {

    /** Runner configured against the "testclusters" fixture project; rebuilt before each test. */
    private GradleRunner runner;

    @Before
    public void setUp() throws Exception {
        runner = getGradleRunner("testclusters");
    }

    /** The listTestClusters task should report the cluster declared in the fixture build. */
    public void testListClusters() {
        BuildResult result = getTestClustersRunner("listTestClusters").build();
        assertTaskSuccessful(result, ":listTestClusters");
        assertOutputContains(
            result.getOutput(),
            " * myTestCluster:"
        );
    }

    /** A single task using a cluster starts and stops it exactly once. */
    public void testUseClusterByOne() {
        BuildResult result = getTestClustersRunner(":user1").build();
        assertTaskSuccessful(result, ":user1");
        assertStartedAndStoppedOnce(result);
    }

    /** With --dry-run no task executes, so no cluster must be started. */
    public void testUseClusterByOneWithDryRun() {
        BuildResult result = getTestClustersRunner("--dry-run", ":user1").build();
        assertNull(result.task(":user1"));
        assertNotStarted(result);
    }

    /** Two tasks sharing the same cluster still cause only one start/stop cycle. */
    public void testUseClusterByTwo() {
        BuildResult result = getTestClustersRunner(":user1", ":user2").build();
        assertTaskSuccessful(result, ":user1", ":user2");
        assertStartedAndStoppedOnce(result);
    }

    /** An up-to-date task must not trigger a cluster start. */
    public void testUseClusterByUpToDateTask() {
        // Run it once, ignoring the result and again to make sure it's considered up to date.
        // Gradle randomly considers tasks without inputs and outputs as as up-to-date or success on the first run
        getTestClustersRunner(":upToDate1").build();
        BuildResult result = getTestClustersRunner(":upToDate1").build();
        assertTaskUpToDate(result, ":upToDate1");
        assertNotStarted(result);
    }

    /** Skipped (disabled) tasks must not trigger a cluster start. */
    public void testUseClusterBySkippedTask() {
        BuildResult result = getTestClustersRunner(":skipped1", ":skipped2").build();
        assertTaskSkipped(result, ":skipped1", ":skipped2");
        assertNotStarted(result);
    }

    /** A mix of skipped and executed tasks starts the cluster only for the executed one. */
    public void testUseClusterBySkippedAndWorkingTask() {
        BuildResult result = getTestClustersRunner("skipped1", "user1").build();
        assertTaskSkipped(result, ":skipped1");
        assertTaskSuccessful(result, ":user1");
        assertOutputContains(
            result.getOutput(),
            "> Task :user1",
            "Starting `node{::myTestCluster-0}`",
            "Stopping `node{::myTestCluster-0}`"
        );
    }

    @Ignore // https://github.com/elastic/elasticsearch/issues/41256
    public void testMultiProject() {
        BuildResult result = getTestClustersRunner(
            "user1", "user2", "-s", "-i", "--parallel", "-Dlocal.repo.path=" + getLocalTestRepoPath()
        ).build();
        assertTaskSuccessful(
            result,
            ":user1", ":user2", ":alpha:user1", ":alpha:user2", ":bravo:user1", ":bravo:user2"
        );
        assertStartedAndStoppedOnce(result);
        assertOutputOnlyOnce(
            result.getOutput(),
            "Starting `node{:alpha:myTestCluster-0}`",
            "Stopping `node{::myTestCluster-0}`"
        );
        assertOutputOnlyOnce(
            result.getOutput(),
            "Starting `node{::myTestCluster-0}`",
            "Stopping `node{:bravo:myTestCluster-0}`"
        );
    }

    /** Released distributions (default, OSS, integ-test) can each be started once. */
    public void testReleased() {
        BuildResult result = getTestClustersRunner("testReleased").build();
        assertTaskSuccessful(result, ":testReleased");
        assertStartedAndStoppedOnce(result, "releasedVersionDefault-0");
        assertStartedAndStoppedOnce(result, "releasedVersionOSS-0");
        assertStartedAndStoppedOnce(result, "releasedVersionIntegTest-0");
    }

    /** Repeated and clean builds each start/stop the cluster exactly once per invocation. */
    public void testIncremental() {
        BuildResult result = getTestClustersRunner("clean", ":user1").build();
        assertTaskSuccessful(result, ":user1");
        assertStartedAndStoppedOnce(result);

        result = getTestClustersRunner(":user1").build();
        assertTaskSuccessful(result, ":user1");
        assertStartedAndStoppedOnce(result);

        result = getTestClustersRunner("clean", ":user1").build();
        assertTaskSuccessful(result, ":user1");
        // Fixed: the original asserted this twice in a row for the same build result.
        assertStartedAndStoppedOnce(result);
    }

    /** Even when the using task fails, the cluster is stopped (with log tailing). */
    public void testUseClusterByFailingOne() {
        BuildResult result = getTestClustersRunner(":itAlwaysFails").buildAndFail();
        assertTaskFailed(result, ":itAlwaysFails");
        assertStartedAndStoppedOnce(result);
        assertOutputContains(
            result.getOutput(),
            "Stopping `node{::myTestCluster-0}`, tailLogs: true",
            "Execution failed for task ':itAlwaysFails'."
        );
    }

    /** A failing dependency prevents the dependent task but the cluster is still cleaned up. */
    public void testUseClusterByFailingDependency() {
        BuildResult result = getTestClustersRunner(":dependsOnFailed").buildAndFail();
        assertTaskFailed(result, ":itAlwaysFails");
        assertNull(result.task(":dependsOnFailed"));
        assertStartedAndStoppedOnce(result);
        assertOutputContains(
            result.getOutput(),
            "Stopping `node{::myTestCluster-0}`, tailLogs: true",
            "Execution failed for task ':itAlwaysFails'."
        );
    }

    /** Cluster configuration is frozen once a task has claimed it. */
    public void testConfigurationLocked() {
        BuildResult result = getTestClustersRunner(":illegalConfigAlter").buildAndFail();
        assertTaskFailed(result, ":illegalConfigAlter");
        assertOutputContains(
            result.getOutput(),
            "Configuration for node{::myTestCluster-0} can not be altered, already locked"
        );
    }

    @Ignore // https://github.com/elastic/elasticsearch/issues/41256
    public void testMultiNode() {
        BuildResult result = getTestClustersRunner(":multiNode").build();
        assertTaskSuccessful(result, ":multiNode");
        assertStartedAndStoppedOnce(result, "multiNode-0");
        assertStartedAndStoppedOnce(result, "multiNode-1");
        assertStartedAndStoppedOnce(result, "multiNode-2");
    }

    /** A plugin declared on the cluster is installed and loaded by the node. */
    public void testPluginInstalled() {
        BuildResult result = getTestClustersRunner(":printLog").build();
        assertTaskSuccessful(result, ":printLog");
        assertStartedAndStoppedOnce(result);
        assertOutputContains(result.getOutput(), "-> Installed dummy");
        assertOutputContains(result.getOutput(), "loaded plugin [dummy]");
    }

    /** Asserts that no cluster start or stop message appears in the build output. */
    private void assertNotStarted(BuildResult result) {
        assertOutputDoesNotContain(
            result.getOutput(),
            "Starting ",
            "Stopping "
        );
    }

    /** Appends the standard flags (-s, -i, local repo path) to the requested tasks. */
    private GradleRunner getTestClustersRunner(String... tasks) {
        String[] arguments = Arrays.copyOf(tasks, tasks.length + 3);
        arguments[tasks.length] = "-s";
        arguments[tasks.length + 1] = "-i";
        arguments[tasks.length + 2] = "-Dlocal.repo.path=" + getLocalTestRepoPath();
        return runner.withArguments(arguments);
    }

    /** Asserts the given node was started exactly once and stopped exactly once. */
    private void assertStartedAndStoppedOnce(BuildResult result, String nodeName) {
        assertOutputOnlyOnce(
            result.getOutput(),
            "Starting `node{::" + nodeName + "}`",
            "Stopping `node{::" + nodeName + "}`"
        );
    }

    private void assertStartedAndStoppedOnce(BuildResult result) {
        assertStartedAndStoppedOnce(result, "myTestCluster-0");
    }
}

View File

@ -14,12 +14,6 @@ dependencies {
repositories {
jcenter()
repositories {
maven {
name "local-repo"
url System.getProperty("local.repo.path")
}
}
}
// todo remove offending rules
@ -30,6 +24,8 @@ jarHell.enabled = false
// we don't have tests for now
test.enabled = false
thirdPartyAudit.enabled = false
// This requires an additional Jar not part of build-tools
loggerUsageCheck.enabled = false
task hello {
doFirst {

View File

@ -1,29 +0,0 @@
// Fixture project used to exercise the JarHell check: it deliberately compiles a
// class that clashes with log4j-api (see src for org.apache.logging.log4j.Logger).
plugins {
    id 'java'
    id 'elasticsearch.build'
}

// This is a throwaway fixture; disable the checks that need real metadata.
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
forbiddenApisMain.enabled = false
forbiddenApisTest.enabled = false
thirdPartyAudit.enabled = false

ext.licenseFile = file("$buildDir/dummy/license")
ext.noticeFile = file("$buildDir/dummy/notice")

repositories {
    jcenter()
    // Fixed: the maven repo was previously wrapped in a redundant nested
    // `repositories {}` block; register it directly at repository-handler scope.
    maven {
        name "local"
        url System.getProperty("local.repo.path")
    }
}

dependencies {
    // Needed for the JarHell task
    testCompile ("org.elasticsearch.test:framework:${versions.elasticsearch}")
    // causes jar hell with local sources
    compile "org.apache.logging.log4j:log4j-api:${versions.log4j}"
}

View File

@ -1,7 +0,0 @@
package org.apache.logging.log4j;
// Jar Hell !
// Intentionally empty class that shadows org.apache.logging.log4j.Logger from
// the real log4j-api jar, so the JarHell check trips over the duplicate class.
public class Logger {
}

View File

@ -1,142 +0,0 @@
// Fixture build driving TestClustersPluginIT: declares clusters and tasks whose
// start/stop behavior the integration tests assert on via build output.
plugins {
    id 'elasticsearch.testclusters'
    id 'base'
}

allprojects { all ->
    repositories {
        // Local artifacts (e.g. the dummy plugin zip) are resolved from a flat dir.
        flatDir {
            dir System.getProperty("test.local-test-downloads-path")
        }
        maven {
            name "local"
            url System.getProperty("local.repo.path")
        }
        // Lucene snapshot repo is only needed when testing against a snapshot build.
        String luceneSnapshotRevision = System.getProperty("test.lucene-snapshot-revision")
        if (luceneSnapshotRevision != null) {
            maven {
                name "lucene-snapshots"
                url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/" + luceneSnapshotRevision
            }
        }
    }
    // Root plus the alpha/bravo subprojects each get their own myTestCluster;
    // charlie deliberately does not, to cover the multi-project scenarios.
    if (project == rootProject || project.name == "alpha" || project.name == "bravo") {
        apply plugin: 'elasticsearch.testclusters'

        all.testClusters {
            myTestCluster {
                testDistribution = 'DEFAULT'
                version = System.getProperty("test.version_under_test")
                javaHome = file(System.getProperty('java.home'))
                plugin file("${project(":dummyPlugin").buildDir}/distributions/dummy-${System.getProperty("test.version_under_test")}.zip")
            }
        }

        // Two independent tasks sharing one cluster: the IT asserts a single start/stop.
        task user1 {
            useCluster testClusters.myTestCluster
            doFirst {
                println "$path: Cluster running @ ${testClusters.myTestCluster.httpSocketURI}"
            }
        }
        task user2 {
            useCluster testClusters.myTestCluster
            doFirst {
                println "$path: Cluster running @ ${testClusters.myTestCluster.httpSocketURI}"
            }
        }
        // The dummy plugin zip must exist before cluster artifacts are synced.
        syncTestClustersArtifacts {
            dependsOn ":dummyPlugin:bundlePlugin"
        }
    }
}

testClusters {
    // Three-node cluster for the multi-node test.
    multiNode {
        version = System.getProperty("test.version_under_test")
        testDistribution = 'DEFAULT'
        javaHome = file(System.getProperty('java.home'))
        numberOfNodes = 3
    }
    // Released 7.0.0 distributions, one per flavor, for testReleased.
    releasedVersionDefault {
        version = "7.0.0"
        testDistribution = 'DEFAULT'
        javaHome = file(System.getProperty('java.home'))
    }
    releasedVersionOSS {
        version = "7.0.0"
        testDistribution = 'OSS'
        javaHome = file(System.getProperty('java.home'))
    }
    releasedVersionIntegTest {
        version = "7.0.0"
        testDistribution = 'INTEG_TEST'
        javaHome = file(System.getProperty('java.home'))
    }
}

task multiNode {
    useCluster testClusters.multiNode
    doFirst {
        println "$path: Cluster running @ ${testClusters.multiNode.httpSocketURI}"
    }
}

task testReleased {
    useCluster testClusters.releasedVersionDefault
    useCluster testClusters.releasedVersionOSS
    useCluster testClusters.releasedVersionIntegTest
    doFirst {
        println "$path: Cluster running @ ${testClusters.releasedVersionDefault.httpSocketURI}"
        println "$path: Cluster running @ ${testClusters.releasedVersionOSS.httpSocketURI}"
        println "$path: Cluster running @ ${testClusters.releasedVersionIntegTest.httpSocketURI}"
    }
}

// Prints node log lines so the IT can assert the dummy plugin was loaded.
task printLog {
    useCluster testClusters.myTestCluster
    doFirst {
        println "$path: Cluster running @ ${testClusters.myTestCluster.httpSocketURI}"
        testClusters.myTestCluster.singleNode().logLines().each {
            println it
        }
    }
}

// Always reported up-to-date after the first run; must not start the cluster then.
task upToDate1 {
    useCluster testClusters.myTestCluster
    outputs.upToDateWhen { true }
    doLast {
        println "Some task action"
    }
}

// Disabled tasks: claiming a cluster must not start it when the task is skipped.
task skipped1 {
    enabled = false
    useCluster testClusters.myTestCluster
}
task skipped2 {
    enabled = false
    useCluster testClusters.myTestCluster
}

// Fails on purpose; the IT asserts the cluster is still stopped (with tailLogs).
task itAlwaysFails {
    doLast {
        throw new GradleException("Task 1 failed!")
    }
    useCluster testClusters.myTestCluster
}

// Never runs because its dependency fails; cluster cleanup must still happen.
task dependsOnFailed {
    dependsOn itAlwaysFails
    useCluster testClusters.myTestCluster
}

// Mutating cluster config after it has been claimed must fail the build.
task illegalConfigAlter {
    useCluster testClusters.myTestCluster
    doFirst {
        println "Going to alter configuration after use"
        testClusters.myTestCluster.testDistribution = 'OSS'
    }
}

View File

@ -1,11 +0,0 @@
// Builds the minimal "dummy" plugin installed into myTestCluster by the fixture
// build; TestClustersPluginIT#testPluginInstalled asserts it gets loaded.
apply plugin: 'elasticsearch.esplugin'

version = System.getProperty("test.version_under_test")

esplugin {
    name 'dummy'
    description 'A dummy plugin used for testing'
    classname 'DummyPlugin'
    // License/notice content is irrelevant for the fixture; point at an empty file.
    licenseFile rootProject.file('empty.txt')
    noticeFile rootProject.file('empty.txt')
}

View File

@ -1,5 +0,0 @@
// Subprojects of the testclusters fixture build.
// Fixed: 'dummyPlugin' was previously included twice (first and last line).
include 'dummyPlugin'
include ':alpha'
include ':bravo'
include ':charlie'

View File

@ -67,7 +67,9 @@ dependencies {
testCompile project(":rest-api-spec")
// Needed for serialization tests:
// (In order to serialize a server side class to a client side class or the other way around)
testCompile project(':x-pack:plugin:core')
testCompile(project(':x-pack:plugin:core')) {
exclude group: 'org.elasticsearch', module: 'elasticsearch-rest-high-level-client'
}
restSpec project(':rest-api-spec')
}
@ -95,6 +97,7 @@ forbiddenApisMain {
addSignatureFiles 'http-signatures'
signaturesFiles += files('src/main/resources/forbidden/rest-high-level-signatures.txt')
}
File nodeCert = file("./testnode.crt")
File nodeTrustStore = file("./testnode.jks")
File pkiTrustCert = file("./src/test/resources/org/elasticsearch/client/security/delegate_pki/testRootCA.crt")

View File

@ -37,6 +37,7 @@ import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.slm.DeleteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecycleRetentionRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.slm.GetSnapshotLifecycleStatsRequest;
@ -467,6 +468,44 @@ public class IndexLifecycleClient {
options, ExecuteSnapshotLifecyclePolicyResponse::fromXContent, listener, emptySet());
}
/**
* Execute snapshot lifecycle retention
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-execute-snapshot-lifecycle-retention.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse executeSnapshotLifecycleRetention(ExecuteSnapshotLifecycleRetentionRequest request,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, IndexLifecycleRequestConverters::executeSnapshotLifecycleRetention,
options, AcknowledgedResponse::fromXContent, emptySet());
}
/**
* Asynchronously execute snapshot lifecycle retention
* See <pre>
* https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current/
* java-rest-high-ilm-slm-execute-snapshot-lifecycle-retention.html
* </pre>
* for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable executeSnapshotLifecycleRetentionAsync(
ExecuteSnapshotLifecycleRetentionRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request, IndexLifecycleRequestConverters::executeSnapshotLifecycleRetention,
options, AcknowledgedResponse::fromXContent, listener, emptySet());
}
/**
* Retrieve snapshot lifecycle statistics.
* See <pre>

View File

@ -34,6 +34,7 @@ import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.client.slm.DeleteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecycleRetentionRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecycleStatsRequest;
import org.elasticsearch.client.slm.PutSnapshotLifecyclePolicyRequest;
@ -217,6 +218,18 @@ final class IndexLifecycleRequestConverters {
return request;
}
static Request executeSnapshotLifecycleRetention(ExecuteSnapshotLifecycleRetentionRequest executeSnapshotLifecycleRetentionRequest) {
Request request = new Request(HttpPost.METHOD_NAME,
new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_slm/_execute_retention")
.build());
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(executeSnapshotLifecycleRetentionRequest.masterNodeTimeout());
params.withTimeout(executeSnapshotLifecycleRetentionRequest.timeout());
request.addParameters(params.asMap());
return request;
}
static Request getSnapshotLifecycleStats(GetSnapshotLifecycleStatsRequest getSnapshotLifecycleStatsRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_slm/stats").build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

View File

@ -191,8 +191,6 @@ final class RequestConverters {
metadata.field("version_type", "external");
} else if (versionType == VersionType.EXTERNAL_GTE) {
metadata.field("version_type", "external_gte");
} else if (versionType == VersionType.FORCE) {
metadata.field("version_type", "force");
}
}

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.util;
package org.elasticsearch.client.common;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentParser;
@ -46,6 +46,14 @@ public final class TimeUtil {
"unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
}
/**
* Parse out an Instant object given the current parser and field name.
*
* @param parser current XContentParser
* @param fieldName the field's preferred name (utilized in exception)
* @return parsed Instant object
* @throws IOException from XContentParser
*/
public static Instant parseTimeFieldToInstant(XContentParser parser, String fieldName) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return Instant.ofEpochMilli(parser.longValue());

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.client.ml.calendars;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;

View File

@ -0,0 +1,245 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
 * Client-side request configuration for the "classification" data frame analysis.
 * Immutable; build instances via {@link #builder(String)}. Serialized/parsed as the
 * body of the {@code classification} analysis object.
 */
public class Classification implements DataFrameAnalysis {

    /** Parses a {@code classification} object from the current parser position. */
    public static Classification fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /** Entry point for building an instance; the dependent variable is required. */
    public static Builder builder(String dependentVariable) {
        return new Builder(dependentVariable);
    }

    public static final ParseField NAME = new ParseField("classification");

    static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable");
    static final ParseField LAMBDA = new ParseField("lambda");
    static final ParseField GAMMA = new ParseField("gamma");
    static final ParseField ETA = new ParseField("eta");
    static final ParseField MAXIMUM_NUMBER_TREES = new ParseField("maximum_number_trees");
    static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction");
    static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name");
    static final ParseField TRAINING_PERCENT = new ParseField("training_percent");

    // Lenient parser (ignoreUnknownFields=true): positions a[0]..a[7] must match the
    // declaration order in the static block below and the constructor parameter order.
    private static final ConstructingObjectParser<Classification, Void> PARSER =
        new ConstructingObjectParser<>(
            NAME.getPreferredName(),
            true,
            a -> new Classification(
                (String) a[0],
                (Double) a[1],
                (Double) a[2],
                (Double) a[3],
                (Integer) a[4],
                (Double) a[5],
                (String) a[6],
                (Double) a[7]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA);
        PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAXIMUM_NUMBER_TREES);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION);
        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME);
        PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT);
    }

    // Only dependentVariable is required; all other fields are nullable and are
    // simply omitted from the XContent when unset.
    private final String dependentVariable;
    private final Double lambda;
    private final Double gamma;
    private final Double eta;
    private final Integer maximumNumberTrees;
    private final Double featureBagFraction;
    private final String predictionFieldName;
    private final Double trainingPercent;

    private Classification(String dependentVariable, @Nullable Double lambda, @Nullable Double gamma, @Nullable Double eta,
                           @Nullable Integer maximumNumberTrees, @Nullable Double featureBagFraction, @Nullable String predictionFieldName,
                           @Nullable Double trainingPercent) {
        this.dependentVariable = Objects.requireNonNull(dependentVariable);
        this.lambda = lambda;
        this.gamma = gamma;
        this.eta = eta;
        this.maximumNumberTrees = maximumNumberTrees;
        this.featureBagFraction = featureBagFraction;
        this.predictionFieldName = predictionFieldName;
        this.trainingPercent = trainingPercent;
    }

    @Override
    public String getName() {
        return NAME.getPreferredName();
    }

    public String getDependentVariable() {
        return dependentVariable;
    }

    public Double getLambda() {
        return lambda;
    }

    public Double getGamma() {
        return gamma;
    }

    public Double getEta() {
        return eta;
    }

    public Integer getMaximumNumberTrees() {
        return maximumNumberTrees;
    }

    public Double getFeatureBagFraction() {
        return featureBagFraction;
    }

    public String getPredictionFieldName() {
        return predictionFieldName;
    }

    public Double getTrainingPercent() {
        return trainingPercent;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable);
        // Optional fields are emitted only when explicitly set.
        if (lambda != null) {
            builder.field(LAMBDA.getPreferredName(), lambda);
        }
        if (gamma != null) {
            builder.field(GAMMA.getPreferredName(), gamma);
        }
        if (eta != null) {
            builder.field(ETA.getPreferredName(), eta);
        }
        if (maximumNumberTrees != null) {
            builder.field(MAXIMUM_NUMBER_TREES.getPreferredName(), maximumNumberTrees);
        }
        if (featureBagFraction != null) {
            builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction);
        }
        if (predictionFieldName != null) {
            builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName);
        }
        if (trainingPercent != null) {
            builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, predictionFieldName,
            trainingPercent);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Classification that = (Classification) o;
        return Objects.equals(dependentVariable, that.dependentVariable)
            && Objects.equals(lambda, that.lambda)
            && Objects.equals(gamma, that.gamma)
            && Objects.equals(eta, that.eta)
            && Objects.equals(maximumNumberTrees, that.maximumNumberTrees)
            && Objects.equals(featureBagFraction, that.featureBagFraction)
            && Objects.equals(predictionFieldName, that.predictionFieldName)
            && Objects.equals(trainingPercent, that.trainingPercent);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /** Fluent builder; {@code dependentVariable} is the only mandatory parameter. */
    public static class Builder {
        private String dependentVariable;
        private Double lambda;
        private Double gamma;
        private Double eta;
        private Integer maximumNumberTrees;
        private Double featureBagFraction;
        private String predictionFieldName;
        private Double trainingPercent;

        private Builder(String dependentVariable) {
            this.dependentVariable = Objects.requireNonNull(dependentVariable);
        }

        public Builder setLambda(Double lambda) {
            this.lambda = lambda;
            return this;
        }

        public Builder setGamma(Double gamma) {
            this.gamma = gamma;
            return this;
        }

        public Builder setEta(Double eta) {
            this.eta = eta;
            return this;
        }

        public Builder setMaximumNumberTrees(Integer maximumNumberTrees) {
            this.maximumNumberTrees = maximumNumberTrees;
            return this;
        }

        public Builder setFeatureBagFraction(Double featureBagFraction) {
            this.featureBagFraction = featureBagFraction;
            return this;
        }

        public Builder setPredictionFieldName(String predictionFieldName) {
            this.predictionFieldName = predictionFieldName;
            return this;
        }

        public Builder setTrainingPercent(Double trainingPercent) {
            this.trainingPercent = trainingPercent;
            return this;
        }

        public Classification build() {
            return new Classification(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, predictionFieldName,
                trainingPercent);
        }
    }
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.Version;
import org.elasticsearch.client.transform.transforms.util.TimeUtil;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;

View File

@ -36,6 +36,10 @@ public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentPr
new NamedXContentRegistry.Entry(
DataFrameAnalysis.class,
Regression.NAME,
(p, c) -> Regression.fromXContent(p)));
(p, c) -> Regression.fromXContent(p)),
new NamedXContentRegistry.Entry(
DataFrameAnalysis.class,
Classification.NAME,
(p, c) -> Classification.fromXContent(p)));
}
}

View File

@ -49,16 +49,19 @@ public class Regression implements DataFrameAnalysis {
static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name");
static final ParseField TRAINING_PERCENT = new ParseField("training_percent");
private static final ConstructingObjectParser<Regression, Void> PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(), true,
a -> new Regression(
(String) a[0],
(Double) a[1],
(Double) a[2],
(Double) a[3],
(Integer) a[4],
(Double) a[5],
(String) a[6],
(Double) a[7]));
private static final ConstructingObjectParser<Regression, Void> PARSER =
new ConstructingObjectParser<>(
NAME.getPreferredName(),
true,
a -> new Regression(
(String) a[0],
(Double) a[1],
(Double) a[2],
(Double) a[3],
(Integer) a[4],
(Double) a[5],
(String) a[6],
(Double) a[7]));
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE);

View File

@ -18,7 +18,9 @@
*/
package org.elasticsearch.client.ml.dataframe.evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
@ -41,6 +43,7 @@ public class MlEvaluationNamedXContentProvider implements NamedXContentProvider
// Evaluations
new NamedXContentRegistry.Entry(
Evaluation.class, new ParseField(BinarySoftClassification.NAME), BinarySoftClassification::fromXContent),
new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Classification.NAME), Classification::fromXContent),
new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Regression.NAME), Regression::fromXContent),
// Evaluation metrics
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(AucRocMetric.NAME), AucRocMetric::fromXContent),
@ -48,6 +51,10 @@ public class MlEvaluationNamedXContentProvider implements NamedXContentProvider
new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(RecallMetric.NAME), RecallMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class,
new ParseField(MulticlassConfusionMatrixMetric.NAME),
MulticlassConfusionMatrixMetric::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.class, new ParseField(MeanSquaredErrorMetric.NAME), MeanSquaredErrorMetric::fromXContent),
new NamedXContentRegistry.Entry(
@ -60,10 +67,14 @@ public class MlEvaluationNamedXContentProvider implements NamedXContentProvider
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(RecallMetric.NAME), RecallMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(RSquaredMetric.NAME), RSquaredMetric.Result::fromXContent),
EvaluationMetric.Result.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class,
new ParseField(MulticlassConfusionMatrixMetric.NAME),
MulticlassConfusionMatrixMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(MeanSquaredErrorMetric.NAME), MeanSquaredErrorMetric.Result::fromXContent),
new NamedXContentRegistry.Entry(
EvaluationMetric.Result.class, new ParseField(ConfusionMatrixMetric.NAME), ConfusionMatrixMetric.Result::fromXContent));
EvaluationMetric.Result.class, new ParseField(RSquaredMetric.NAME), RSquaredMetric.Result::fromXContent));
}
}

View File

@ -0,0 +1,132 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.classification;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
/**
 * Evaluation of classification results: compares an actual (ground-truth) field
 * against a predicted field using a configurable set of metrics.
 */
public class Classification implements Evaluation {

    public static final String NAME = "classification";

    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
    private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field");
    private static final ParseField METRICS = new ParseField("metrics");

    @SuppressWarnings("unchecked")
    public static final ConstructingObjectParser<Classification, Void> PARSER = new ConstructingObjectParser<>(
        NAME, true, a -> new Classification((String) a[0], (String) a[1], (List<EvaluationMetric>) a[2]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ACTUAL_FIELD);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), PREDICTED_FIELD);
        PARSER.declareNamedObjects(ConstructingObjectParser.optionalConstructorArg(),
            (p, c, n) -> p.namedObject(EvaluationMetric.class, n, c), METRICS);
    }

    public static Classification fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    /**
     * The field containing the actual (ground-truth) value
     */
    private final String actualField;

    /**
     * The field containing the predicted value
     */
    private final String predictedField;

    /**
     * The list of metrics to calculate, kept sorted by metric name; {@code null}
     * means "use the server-side defaults".
     */
    private final List<EvaluationMetric> metrics;

    public Classification(String actualField, String predictedField) {
        this(actualField, predictedField, (List<EvaluationMetric>)null);
    }

    public Classification(String actualField, String predictedField, EvaluationMetric... metrics) {
        this(actualField, predictedField, Arrays.asList(metrics));
    }

    public Classification(String actualField, String predictedField, @Nullable List<EvaluationMetric> metrics) {
        this.actualField = Objects.requireNonNull(actualField);
        this.predictedField = Objects.requireNonNull(predictedField);
        if (metrics != null) {
            // Fixed: sort a copy rather than the caller's list. The original called
            // metrics.sort(...) in place, mutating the argument and throwing
            // UnsupportedOperationException for unmodifiable lists.
            EvaluationMetric[] sorted = metrics.toArray(new EvaluationMetric[0]);
            Arrays.sort(sorted, Comparator.comparing(EvaluationMetric::getName));
            metrics = Arrays.asList(sorted);
        }
        this.metrics = metrics;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
        builder.field(PREDICTED_FIELD.getPreferredName(), predictedField);
        // Metrics are optional; omit the object entirely when unset.
        if (metrics != null) {
            builder.startObject(METRICS.getPreferredName());
            for (EvaluationMetric metric : metrics) {
                builder.field(metric.getName(), metric);
            }
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Classification that = (Classification) o;
        return Objects.equals(that.actualField, this.actualField)
            && Objects.equals(that.predictedField, this.predictedField)
            && Objects.equals(that.metrics, this.metrics);
    }

    @Override
    public int hashCode() {
        return Objects.hash(actualField, predictedField, metrics);
    }
}

View File

@ -0,0 +1,164 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.classification;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Calculates the multiclass confusion matrix.
*/
public class MulticlassConfusionMatrixMetric implements EvaluationMetric {

    public static final String NAME = "multiclass_confusion_matrix";
    // Optional cap on the number of distinct classes included in the matrix.
    public static final ParseField SIZE = new ParseField("size");

    private static final ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> PARSER = createParser();

    // Factory keeps the parser declaration next to its construction; lenient (true) so
    // unknown fields from newer servers are ignored.
    private static ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> createParser() {
        ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> parser =
            new ConstructingObjectParser<>(NAME, true, args -> new MulticlassConfusionMatrixMetric((Integer) args[0]));
        parser.declareInt(optionalConstructorArg(), SIZE);
        return parser;
    }

    public static MulticlassConfusionMatrixMetric fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    // Nullable: when null, the "size" field is omitted and the server default applies.
    private final Integer size;

    public MulticlassConfusionMatrixMetric() {
        this(null);
    }

    /**
     * @param size maximum number of classes to include in the matrix, or null for the server default
     */
    public MulticlassConfusionMatrixMetric(@Nullable Integer size) {
        this.size = size;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (size != null) {
            builder.field(SIZE.getPreferredName(), size);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        MulticlassConfusionMatrixMetric that = (MulticlassConfusionMatrixMetric) o;
        return Objects.equals(this.size, that.size);
    }

    @Override
    public int hashCode() {
        return Objects.hash(size);
    }

    /**
     * The parsed result: a per-class confusion matrix plus the count of documents whose
     * class fell outside the reported matrix.
     */
    public static class Result implements EvaluationMetric.Result {

        private static final ParseField CONFUSION_MATRIX = new ParseField("confusion_matrix");
        private static final ParseField OTHER_CLASSES_COUNT = new ParseField("_other_");

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<Result, Void> PARSER =
            new ConstructingObjectParser<>(
                "multiclass_confusion_matrix_result", true, a -> new Result((Map<String, Map<String, Long>>) a[0], (long) a[1]));

        static {
            // TreeMap at both levels keeps class names sorted, giving deterministic iteration order.
            PARSER.declareObject(
                constructorArg(),
                (p, c) -> p.map(TreeMap::new, p2 -> p2.map(TreeMap::new, XContentParser::longValue)),
                CONFUSION_MATRIX);
            PARSER.declareLong(constructorArg(), OTHER_CLASSES_COUNT);
        }

        public static Result fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        // Outer map is wrapped unmodifiable; NOTE(review): the nested per-class maps are not
        // defensively copied, so immutability is only shallow — confirm callers never mutate them.
        private final Map<String, Map<String, Long>> confusionMatrix;
        private final long otherClassesCount;

        public Result(Map<String, Map<String, Long>> confusionMatrix, long otherClassesCount) {
            this.confusionMatrix = Collections.unmodifiableMap(Objects.requireNonNull(confusionMatrix));
            this.otherClassesCount = otherClassesCount;
        }

        @Override
        public String getMetricName() {
            return NAME;
        }

        public Map<String, Map<String, Long>> getConfusionMatrix() {
            return confusionMatrix;
        }

        public long getOtherClassesCount() {
            return otherClassesCount;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(CONFUSION_MATRIX.getPreferredName(), confusionMatrix);
            builder.field(OTHER_CLASSES_COUNT.getPreferredName(), otherClassesCount);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Result that = (Result) o;
            return Objects.equals(this.confusionMatrix, that.confusionMatrix)
                && this.otherClassesCount == that.otherClassesCount;
        }

        @Override
        public int hashCode() {
            return Objects.hash(confusionMatrix, otherClassesCount);
        }
    }
}

View File

@ -19,6 +19,10 @@
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum;
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
@ -47,6 +51,15 @@ public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
// Model
namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent));
namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Ensemble.NAME), Ensemble::fromXContent));
// Aggregating output
namedXContent.add(new NamedXContentRegistry.Entry(OutputAggregator.class,
new ParseField(WeightedMode.NAME),
WeightedMode::fromXContent));
namedXContent.add(new NamedXContentRegistry.Entry(OutputAggregator.class,
new ParseField(WeightedSum.NAME),
WeightedSum::fromXContent));
return namedXContent;
}

View File

@ -0,0 +1,34 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
 * Simple interface for XContent objects that carry a name.
 *
 * This affords more general handling when serializing and de-serializing this type of XContent
 * when it is used in a NamedObjects parser: the name selects the concrete type to construct.
 */
public interface NamedXContentObject extends ToXContentObject {
    /**
     * @return The name of the XContentObject that is to be serialized (used as the field key
     *         or type discriminator when written via a NamedObjects-style helper)
     */
    String getName();
}

View File

@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.List;
/**
 * Utility for serializing collections of {@link NamedXContentObject}s.
 *
 * Objects are written either as a single JSON object keyed by each element's name, or —
 * when element order must be preserved — as an array of single-entry objects.
 */
public final class NamedXContentObjectHelper {

    private NamedXContentObjectHelper() {}

    /**
     * Writes {@code namedObjects} under the field {@code namedObjectsName}.
     *
     * @param useExplicitOrder when true, emit an array of one-field objects so the element
     *                         order is explicit; when false, emit one object keyed by name
     * @return the same builder, for chaining
     */
    public static XContentBuilder writeNamedObjects(XContentBuilder builder,
                                                    ToXContent.Params params,
                                                    boolean useExplicitOrder,
                                                    String namedObjectsName,
                                                    List<? extends NamedXContentObject> namedObjects) throws IOException {
        if (useExplicitOrder) {
            builder.startArray(namedObjectsName);
        } else {
            builder.startObject(namedObjectsName);
        }
        for (NamedXContentObject namedObject : namedObjects) {
            if (useExplicitOrder == false) {
                builder.field(namedObject.getName(), namedObject, params);
            } else {
                // Each element becomes its own wrapper object inside the array.
                builder.startObject();
                builder.field(namedObject.getName(), namedObject, params);
                builder.endObject();
            }
        }
        if (useExplicitOrder) {
            builder.endArray();
        } else {
            builder.endObject();
        }
        return builder;
    }
}

View File

@ -0,0 +1,287 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.Version;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.time.Instant;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
/**
 * Client-side representation of a trained inference model's configuration document.
 *
 * All fields are optional/nullable; only fields that are set are serialized.
 */
public class TrainedModelConfig implements ToXContentObject {

    public static final String NAME = "trained_model_doc";

    public static final ParseField MODEL_ID = new ParseField("model_id");
    public static final ParseField CREATED_BY = new ParseField("created_by");
    public static final ParseField VERSION = new ParseField("version");
    public static final ParseField DESCRIPTION = new ParseField("description");
    public static final ParseField CREATED_TIME = new ParseField("created_time");
    public static final ParseField MODEL_VERSION = new ParseField("model_version");
    public static final ParseField DEFINITION = new ParseField("definition");
    public static final ParseField MODEL_TYPE = new ParseField("model_type");
    public static final ParseField METADATA = new ParseField("metadata");

    // Lenient (true) so unknown fields from newer servers do not break parsing.
    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME,
        true,
        TrainedModelConfig.Builder::new);
    static {
        PARSER.declareString(TrainedModelConfig.Builder::setModelId, MODEL_ID);
        PARSER.declareString(TrainedModelConfig.Builder::setCreatedBy, CREATED_BY);
        PARSER.declareString(TrainedModelConfig.Builder::setVersion, VERSION);
        PARSER.declareString(TrainedModelConfig.Builder::setDescription, DESCRIPTION);
        PARSER.declareField(TrainedModelConfig.Builder::setCreatedTime,
            (p, c) -> TimeUtil.parseTimeFieldToInstant(p, CREATED_TIME.getPreferredName()),
            CREATED_TIME,
            ObjectParser.ValueType.VALUE);
        PARSER.declareLong(TrainedModelConfig.Builder::setModelVersion, MODEL_VERSION);
        PARSER.declareString(TrainedModelConfig.Builder::setModelType, MODEL_TYPE);
        PARSER.declareObject(TrainedModelConfig.Builder::setMetadata, (p, c) -> p.map(), METADATA);
        PARSER.declareObject(TrainedModelConfig.Builder::setDefinition,
            (p, c) -> TrainedModelDefinition.fromXContent(p),
            DEFINITION);
    }

    public static TrainedModelConfig.Builder fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final String modelId;
    private final String createdBy;
    private final Version version;
    private final String description;
    private final Instant createdTime;
    private final Long modelVersion;
    private final String modelType;
    private final Map<String, Object> metadata;
    private final TrainedModelDefinition definition;

    TrainedModelConfig(String modelId,
                       String createdBy,
                       Version version,
                       String description,
                       Instant createdTime,
                       Long modelVersion,
                       String modelType,
                       TrainedModelDefinition definition,
                       Map<String, Object> metadata) {
        this.modelId = modelId;
        this.createdBy = createdBy;
        this.version = version;
        // Guard against null: created_time is an optional field (the parser may never set it),
        // and the rest of this class (toXContent, equals) already treats it as nullable.
        // The previous implementation threw an NPE here when created_time was absent.
        // When present, truncate to millisecond precision to match the serialized form.
        this.createdTime = createdTime == null ? null : Instant.ofEpochMilli(createdTime.toEpochMilli());
        this.modelType = modelType;
        this.definition = definition;
        this.description = description;
        this.metadata = metadata == null ? null : Collections.unmodifiableMap(metadata);
        this.modelVersion = modelVersion;
    }

    public String getModelId() {
        return modelId;
    }

    public String getCreatedBy() {
        return createdBy;
    }

    public Version getVersion() {
        return version;
    }

    public String getDescription() {
        return description;
    }

    public Instant getCreatedTime() {
        return createdTime;
    }

    public Long getModelVersion() {
        return modelVersion;
    }

    public String getModelType() {
        return modelType;
    }

    public Map<String, Object> getMetadata() {
        return metadata;
    }

    public TrainedModelDefinition getDefinition() {
        return definition;
    }

    public static Builder builder() {
        return new Builder();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (modelId != null) {
            builder.field(MODEL_ID.getPreferredName(), modelId);
        }
        if (createdBy != null) {
            builder.field(CREATED_BY.getPreferredName(), createdBy);
        }
        if (version != null) {
            builder.field(VERSION.getPreferredName(), version.toString());
        }
        if (description != null) {
            builder.field(DESCRIPTION.getPreferredName(), description);
        }
        if (createdTime != null) {
            // Emits both the epoch-millis field and a "<field>_string" human-readable variant.
            builder.timeField(CREATED_TIME.getPreferredName(), CREATED_TIME.getPreferredName() + "_string", createdTime.toEpochMilli());
        }
        if (modelVersion != null) {
            builder.field(MODEL_VERSION.getPreferredName(), modelVersion);
        }
        if (modelType != null) {
            builder.field(MODEL_TYPE.getPreferredName(), modelType);
        }
        if (definition != null) {
            builder.field(DEFINITION.getPreferredName(), definition);
        }
        if (metadata != null) {
            builder.field(METADATA.getPreferredName(), metadata);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TrainedModelConfig that = (TrainedModelConfig) o;
        return Objects.equals(modelId, that.modelId) &&
            Objects.equals(createdBy, that.createdBy) &&
            Objects.equals(version, that.version) &&
            Objects.equals(description, that.description) &&
            Objects.equals(createdTime, that.createdTime) &&
            Objects.equals(modelVersion, that.modelVersion) &&
            Objects.equals(modelType, that.modelType) &&
            Objects.equals(definition, that.definition) &&
            Objects.equals(metadata, that.metadata);
    }

    @Override
    public int hashCode() {
        return Objects.hash(modelId,
            createdBy,
            version,
            createdTime,
            modelType,
            definition,
            description,
            metadata,
            modelVersion);
    }

    /**
     * Mutable builder; setters for server-assigned fields (created_by, version, created_time)
     * are private and only reachable through the parser.
     */
    public static class Builder {

        private String modelId;
        private String createdBy;
        private Version version;
        private String description;
        private Instant createdTime;
        private Long modelVersion;
        private String modelType;
        private Map<String, Object> metadata;
        private TrainedModelDefinition.Builder definition;

        public Builder setModelId(String modelId) {
            this.modelId = modelId;
            return this;
        }

        private Builder setCreatedBy(String createdBy) {
            this.createdBy = createdBy;
            return this;
        }

        private Builder setVersion(Version version) {
            this.version = version;
            return this;
        }

        private Builder setVersion(String version) {
            return this.setVersion(Version.fromString(version));
        }

        public Builder setDescription(String description) {
            this.description = description;
            return this;
        }

        private Builder setCreatedTime(Instant createdTime) {
            this.createdTime = createdTime;
            return this;
        }

        public Builder setModelVersion(Long modelVersion) {
            this.modelVersion = modelVersion;
            return this;
        }

        public Builder setModelType(String modelType) {
            this.modelType = modelType;
            return this;
        }

        public Builder setMetadata(Map<String, Object> metadata) {
            this.metadata = metadata;
            return this;
        }

        public Builder setDefinition(TrainedModelDefinition.Builder definition) {
            this.definition = definition;
            return this;
        }

        public TrainedModelConfig build() {
            return new TrainedModelConfig(
                modelId,
                createdBy,
                version,
                description,
                createdTime,
                modelVersion,
                modelType,
                definition == null ? null : definition.build(),
                metadata);
        }
    }
}

View File

@ -0,0 +1,137 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * The definition of a trained inference model: the trained model itself plus the ordered
 * list of pre-processors applied to the input features before inference.
 */
public class TrainedModelDefinition implements ToXContentObject {

    public static final String NAME = "trained_model_doc";

    public static final ParseField TRAINED_MODEL = new ParseField("trained_model");
    public static final ParseField PREPROCESSORS = new ParseField("preprocessors");

    // Lenient (true) so unknown fields from newer servers are ignored.
    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME,
        true,
        TrainedModelDefinition.Builder::new);
    static {
        PARSER.declareNamedObjects(TrainedModelDefinition.Builder::setTrainedModel,
            (p, c, n) -> p.namedObject(TrainedModel.class, n, null),
            (modelDocBuilder) -> { /* Noop does not matter client side*/ },
            TRAINED_MODEL);
        PARSER.declareNamedObjects(TrainedModelDefinition.Builder::setPreProcessors,
            (p, c, n) -> p.namedObject(PreProcessor.class, n, null),
            (trainedModelDefBuilder) -> {/* Does not matter client side*/ },
            PREPROCESSORS);
    }

    public static TrainedModelDefinition.Builder fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final TrainedModel trainedModel;
    // Never null after construction: defaults to an empty list, wrapped unmodifiable.
    private final List<PreProcessor> preProcessors;

    TrainedModelDefinition(TrainedModel trainedModel, List<PreProcessor> preProcessors) {
        this.trainedModel = trainedModel;
        this.preProcessors = preProcessors == null ? Collections.emptyList() : Collections.unmodifiableList(preProcessors);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        // The model is a single named object; pre-processors are written as an ordered array
        // because their application order matters.
        NamedXContentObjectHelper.writeNamedObjects(builder,
            params,
            false,
            TRAINED_MODEL.getPreferredName(),
            Collections.singletonList(trainedModel));
        NamedXContentObjectHelper.writeNamedObjects(builder,
            params,
            true,
            PREPROCESSORS.getPreferredName(),
            preProcessors);
        builder.endObject();
        return builder;
    }

    public TrainedModel getTrainedModel() {
        return trainedModel;
    }

    public List<PreProcessor> getPreProcessors() {
        return preProcessors;
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TrainedModelDefinition that = (TrainedModelDefinition) o;
        return Objects.equals(trainedModel, that.trainedModel) &&
            Objects.equals(preProcessors, that.preProcessors) ;
    }

    @Override
    public int hashCode() {
        return Objects.hash(trainedModel, preProcessors);
    }

    public static class Builder {

        private List<PreProcessor> preProcessors;
        private TrainedModel trainedModel;

        public Builder setPreProcessors(List<PreProcessor> preProcessors) {
            this.preProcessors = preProcessors;
            return this;
        }

        public Builder setTrainedModel(TrainedModel trainedModel) {
            this.trainedModel = trainedModel;
            return this;
        }

        // Parser adapter: the named-objects parser hands back a list, but exactly one model
        // is expected (asserted, not validated — parsing is lenient).
        private Builder setTrainedModel(List<TrainedModel> trainedModel) {
            assert trainedModel.size() == 1;
            return setTrainedModel(trainedModel.get(0));
        }

        public TrainedModelDefinition build() {
            return new TrainedModelDefinition(this.trainedModel, this.preProcessors);
        }
    }
}

View File

@ -18,13 +18,13 @@
*/
package org.elasticsearch.client.ml.inference.preprocessing;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.client.ml.inference.NamedXContentObject;
/**
* Describes a pre-processor for a defined machine learning model
*/
public interface PreProcessor extends ToXContentObject {
public interface PreProcessor extends NamedXContentObject {
/**
* @return The name of the pre-processor

View File

@ -1,8 +1,3 @@
package org.elasticsearch.gradle.precommit;
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
@ -21,19 +16,20 @@ import org.gradle.testkit.runner.BuildResult;
* specific language governing permissions and limitations
* under the License.
*/
public class JarHellTaskIT extends GradleIntegrationTestCase {
package org.elasticsearch.client.ml.inference.trainedmodel;
public void testJarHellDetected() {
BuildResult result = getGradleRunner("jarHell")
.withArguments("clean", "precommit", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
.buildAndFail();
import java.util.Locale;
assertTaskFailed(result, ":jarHell");
assertOutputContains(
result.getOutput(),
"java.lang.IllegalStateException: jar hell!",
"class: org.apache.logging.log4j.Logger"
);
public enum TargetType {
REGRESSION, CLASSIFICATION;
public static TargetType fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
@Override
public String toString() {
return name().toLowerCase(Locale.ROOT);
}
}

View File

@ -18,11 +18,11 @@
*/
package org.elasticsearch.client.ml.inference.trainedmodel;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.client.ml.inference.NamedXContentObject;
import java.util.List;
public interface TrainedModel extends ToXContentObject {
public interface TrainedModel extends NamedXContentObject {
/**
* @return List of featureNames expected by the model. In the order that they are expected

View File

@ -0,0 +1,188 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.ensemble;
import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper;
import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
 * An ensemble model: the predictions of a list of member {@link TrainedModel}s combined
 * by an {@link OutputAggregator}.
 */
public class Ensemble implements TrainedModel {

    public static final String NAME = "ensemble";
    public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
    public static final ParseField TRAINED_MODELS = new ParseField("trained_models");
    public static final ParseField AGGREGATE_OUTPUT  = new ParseField("aggregate_output");
    public static final ParseField TARGET_TYPE = new ParseField("target_type");
    public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels");

    // Lenient (true) so unknown fields from newer servers are ignored.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(
        NAME,
        true,
        Ensemble.Builder::new);
    static {
        PARSER.declareStringArray(Ensemble.Builder::setFeatureNames, FEATURE_NAMES);
        PARSER.declareNamedObjects(Ensemble.Builder::setTrainedModels,
            (p, c, n) ->
                p.namedObject(TrainedModel.class, n, null),
            (ensembleBuilder) -> { /* Noop does not matter client side */ },
            TRAINED_MODELS);
        PARSER.declareNamedObjects(Ensemble.Builder::setOutputAggregatorFromParser,
            (p, c, n) -> p.namedObject(OutputAggregator.class, n, null),
            (ensembleBuilder) -> { /* Noop does not matter client side */ },
            AGGREGATE_OUTPUT);
        PARSER.declareString(Ensemble.Builder::setTargetType, TARGET_TYPE);
        PARSER.declareStringArray(Ensemble.Builder::setClassificationLabels, CLASSIFICATION_LABELS);
    }

    public static Ensemble fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null).build();
    }

    // All fields are nullable; each is omitted from the serialized form when null.
    private final List<String> featureNames;
    private final List<TrainedModel> models;
    private final OutputAggregator outputAggregator;
    private final TargetType targetType;
    private final List<String> classificationLabels;

    Ensemble(List<String> featureNames,
             List<TrainedModel> models,
             @Nullable OutputAggregator outputAggregator,
             TargetType targetType,
             @Nullable List<String> classificationLabels) {
        this.featureNames = featureNames;
        this.models = models;
        this.outputAggregator = outputAggregator;
        this.targetType = targetType;
        this.classificationLabels = classificationLabels;
    }

    @Override
    public List<String> getFeatureNames() {
        return featureNames;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        if (featureNames != null) {
            builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
        }
        if (models != null) {
            // Member models serialize as an ordered array of named objects.
            NamedXContentObjectHelper.writeNamedObjects(builder, params, true, TRAINED_MODELS.getPreferredName(), models);
        }
        if (outputAggregator != null) {
            // A single named object keyed by the aggregator's name.
            NamedXContentObjectHelper.writeNamedObjects(builder,
                params,
                false,
                AGGREGATE_OUTPUT.getPreferredName(),
                Collections.singletonList(outputAggregator));
        }
        if (targetType != null) {
            builder.field(TARGET_TYPE.getPreferredName(), targetType);
        }
        if (classificationLabels != null) {
            builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Ensemble that = (Ensemble) o;
        return Objects.equals(featureNames, that.featureNames)
            && Objects.equals(models, that.models)
            && Objects.equals(targetType, that.targetType)
            && Objects.equals(classificationLabels, that.classificationLabels)
            && Objects.equals(outputAggregator, that.outputAggregator);
    }

    @Override
    public int hashCode() {
        return Objects.hash(featureNames, models, outputAggregator, classificationLabels, targetType);
    }

    public static Builder builder() {
        return new Builder();
    }

    public static class Builder {
        private List<String> featureNames;
        private List<TrainedModel> trainedModels;
        private OutputAggregator outputAggregator;
        private TargetType targetType;
        private List<String> classificationLabels;

        public Builder setFeatureNames(List<String> featureNames) {
            this.featureNames = featureNames;
            return this;
        }

        public Builder setTrainedModels(List<TrainedModel> trainedModels) {
            this.trainedModels = trainedModels;
            return this;
        }

        public Builder setOutputAggregator(OutputAggregator outputAggregator) {
            this.outputAggregator = outputAggregator;
            return this;
        }

        public Builder setTargetType(TargetType targetType) {
            this.targetType = targetType;
            return this;
        }

        public Builder setClassificationLabels(List<String> classificationLabels) {
            this.classificationLabels = classificationLabels;
            return this;
        }

        // Parser adapter: assumes exactly one aggregator was parsed — an empty
        // aggregate_output object would throw IndexOutOfBoundsException here.
        private void setOutputAggregatorFromParser(List<OutputAggregator> outputAggregators) {
            this.setOutputAggregator(outputAggregators.get(0));
        }

        // Parser adapter: converts the raw string to the TargetType enum.
        private void setTargetType(String targetType) {
            this.targetType = TargetType.fromString(targetType);
        }

        public Ensemble build() {
            return new Ensemble(featureNames, trainedModels, outputAggregator, targetType, classificationLabels);
        }
    }
}

View File

@ -0,0 +1,28 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.ensemble;
import org.elasticsearch.client.ml.inference.NamedXContentObject;
/**
 * Marker interface for ensemble output aggregators (e.g. weighted_sum,
 * weighted_mode), serialized as named XContent objects.
 */
public interface OutputAggregator extends NamedXContentObject {
    /**
     * @return The name of the output aggregator, used as its XContent object name
     */
    String getName();
}

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.ensemble;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
 * Client-side representation of the "weighted_mode" ensemble output
 * aggregator. It only carries the optional per-model weights for
 * (de)serialization; the actual aggregation happens server-side.
 */
public class WeightedMode implements OutputAggregator {

    public static final String NAME = "weighted_mode";
    public static final ParseField WEIGHTS = new ParseField("weights");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<WeightedMode, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        true,
        args -> new WeightedMode((List<Double>) args[0]));

    static {
        PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS);
    }

    public static WeightedMode fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    // One weight per ensemble member; may be null (field then omitted on write).
    private final List<Double> weights;

    public WeightedMode(List<Double> weights) {
        this.weights = weights;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        if (weights != null) {
            builder.field(WEIGHTS.getPreferredName(), weights);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        WeightedMode other = (WeightedMode) o;
        return Objects.equals(weights, other.weights);
    }

    @Override
    public int hashCode() {
        return Objects.hash(weights);
    }
}

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.ensemble;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
 * Client-side representation of the "weighted_sum" ensemble output
 * aggregator. It only carries the optional per-model weights for
 * (de)serialization; the actual aggregation happens server-side.
 */
public class WeightedSum implements OutputAggregator {

    public static final String NAME = "weighted_sum";
    public static final ParseField WEIGHTS = new ParseField("weights");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<WeightedSum, Void> PARSER = new ConstructingObjectParser<>(
        NAME,
        true,
        args -> new WeightedSum((List<Double>) args[0]));

    static {
        PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS);
    }

    public static WeightedSum fromXContent(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    // One weight per ensemble member; may be null (field then omitted on write).
    private final List<Double> weights;

    public WeightedSum(List<Double> weights) {
        this.weights = weights;
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        if (weights != null) {
            builder.field(WEIGHTS.getPreferredName(), weights);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        WeightedSum other = (WeightedSum) o;
        return Objects.equals(weights, other.weights);
    }

    @Override
    public int hashCode() {
        return Objects.hash(weights);
    }
}

View File

@ -18,7 +18,9 @@
*/
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
@ -28,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@ -39,12 +40,16 @@ public class Tree implements TrainedModel {
public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");
public static final ParseField TARGET_TYPE = new ParseField("target_type");
public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels");
private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME, true, Builder::new);
static {
PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES);
PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE);
PARSER.declareString(Builder::setTargetType, TARGET_TYPE);
PARSER.declareStringArray(Builder::setClassificationLabels, CLASSIFICATION_LABELS);
}
public static Tree fromXContent(XContentParser parser) {
@ -53,10 +58,14 @@ public class Tree implements TrainedModel {
private final List<String> featureNames;
private final List<TreeNode> nodes;
private final TargetType targetType;
private final List<String> classificationLabels;
Tree(List<String> featureNames, List<TreeNode> nodes) {
this.featureNames = Collections.unmodifiableList(Objects.requireNonNull(featureNames));
this.nodes = Collections.unmodifiableList(Objects.requireNonNull(nodes));
Tree(List<String> featureNames, List<TreeNode> nodes, TargetType targetType, List<String> classificationLabels) {
this.featureNames = featureNames;
this.nodes = nodes;
this.targetType = targetType;
this.classificationLabels = classificationLabels;
}
@Override
@ -73,11 +82,30 @@ public class Tree implements TrainedModel {
return nodes;
}
@Nullable
public List<String> getClassificationLabels() {
return classificationLabels;
}
public TargetType getTargetType() {
return targetType;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
if (featureNames != null) {
builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
}
if (nodes != null) {
builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
}
if (classificationLabels != null) {
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels);
}
if (targetType != null) {
builder.field(TARGET_TYPE.getPreferredName(), targetType.toString());
}
builder.endObject();
return builder;
}
@ -93,12 +121,14 @@ public class Tree implements TrainedModel {
if (o == null || getClass() != o.getClass()) return false;
Tree that = (Tree) o;
return Objects.equals(featureNames, that.featureNames)
&& Objects.equals(classificationLabels, that.classificationLabels)
&& Objects.equals(targetType, that.targetType)
&& Objects.equals(nodes, that.nodes);
}
@Override
public int hashCode() {
return Objects.hash(featureNames, nodes);
return Objects.hash(featureNames, nodes, targetType, classificationLabels);
}
public static Builder builder() {
@ -109,6 +139,8 @@ public class Tree implements TrainedModel {
private List<String> featureNames;
private ArrayList<TreeNode.Builder> nodes;
private int numNodes;
private TargetType targetType;
private List<String> classificationLabels;
public Builder() {
nodes = new ArrayList<>();
@ -137,6 +169,20 @@ public class Tree implements TrainedModel {
return setNodes(Arrays.asList(nodes));
}
public Builder setTargetType(TargetType targetType) {
this.targetType = targetType;
return this;
}
public Builder setClassificationLabels(List<String> classificationLabels) {
this.classificationLabels = classificationLabels;
return this;
}
private void setTargetType(String targetType) {
this.targetType = TargetType.fromString(targetType);
}
/**
* Add a decision node. Space for the child nodes is allocated
* @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index
@ -185,7 +231,9 @@ public class Tree implements TrainedModel {
public Tree build() {
return new Tree(featureNames,
nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()));
nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()),
targetType,
classificationLabels);
}
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.client.ml.job.config;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.process;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,9 +18,9 @@
*/
package org.elasticsearch.client.ml.job.process;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.results.Result;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -19,8 +19,8 @@
package org.elasticsearch.client.ml.job.process;
import org.elasticsearch.Version;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.client.ml.job.results;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.util.TimeUtil;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;

View File

@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.job.util;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
import java.util.Date;
/**
 * Static helpers for parsing time values out of an XContent stream.
 */
public final class TimeUtil {

    private TimeUtil() {
        // utility class: static methods only, no instances
    }

    /**
     * Parse out a Date object given the current parser and field name.
     * Accepts either a numeric value (milliseconds since the epoch) or an
     * ISO-8601 instant string (e.g. {@code "2019-10-07T10:07:56Z"}).
     *
     * @param parser current XContentParser
     * @param fieldName the field's preferred name (utilized in exception)
     * @return parsed Date object
     * @throws IOException from XContentParser
     * @throws IllegalArgumentException if the current token is neither a number nor a string
     */
    public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
            // numeric form: epoch milliseconds
            return new Date(parser.longValue());
        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
            // string form: strict ISO instant, converted back to epoch milliseconds
            return new Date(DateFormatters.from(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli());
        }
        throw new IllegalArgumentException(
            "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
    }
}

View File

@ -17,14 +17,9 @@
* under the License.
*/
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.common.settings.Settings;
package org.elasticsearch.client.slm;
import java.nio.file.Path;
public class DummyPlugin extends Plugin {
public DummyPlugin(final Settings settings, final Path configPath) {
}
import org.elasticsearch.client.TimedRequest;
public class ExecuteSnapshotLifecycleRetentionRequest extends TimedRequest {
}

View File

@ -19,7 +19,7 @@
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.client.transform.transforms.util.TimeUtil;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.Version;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.client.transform.transforms.util.TimeUtil;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;

View File

@ -125,7 +125,9 @@ import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.PhaseProgress;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
@ -1315,6 +1317,41 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(createdConfig.getDescription(), equalTo("this is a regression"));
}
/**
 * Round-trips a classification data frame analytics config through the PUT
 * API and checks the returned config echoes the request, with server-side
 * defaults (query, results field, model memory limit) filled in.
 */
public void testPutDataFrameAnalyticsConfig_GivenClassification() throws Exception {
    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
    String configId = "test-put-df-analytics-classification";
    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
        .setId(configId)
        .setSource(DataFrameAnalyticsSource.builder()
            .setIndex("put-test-source-index")
            .build())
        .setDest(DataFrameAnalyticsDest.builder()
            .setIndex("put-test-dest-index")
            .build())
        .setAnalysis(org.elasticsearch.client.ml.dataframe.Classification
            .builder("my_dependent_variable")
            .setTrainingPercent(80.0)
            .build())
        .setDescription("this is a classification")
        .build();

    // the source index must exist before the config referencing it is accepted
    createIndex("put-test-source-index", defaultMappingForTest());

    PutDataFrameAnalyticsResponse putDataFrameAnalyticsResponse = execute(
        new PutDataFrameAnalyticsRequest(config),
        machineLearningClient::putDataFrameAnalytics, machineLearningClient::putDataFrameAnalyticsAsync);
    DataFrameAnalyticsConfig createdConfig = putDataFrameAnalyticsResponse.getConfig();
    assertThat(createdConfig.getId(), equalTo(config.getId()));
    assertThat(createdConfig.getSource().getIndex(), equalTo(config.getSource().getIndex()));
    assertThat(createdConfig.getSource().getQueryConfig(), equalTo(new QueryConfig(new MatchAllQueryBuilder()))); // default value
    assertThat(createdConfig.getDest().getIndex(), equalTo(config.getDest().getIndex()));
    assertThat(createdConfig.getDest().getResultsField(), equalTo("ml")); // default value
    assertThat(createdConfig.getAnalysis(), equalTo(config.getAnalysis()));
    assertThat(createdConfig.getAnalyzedFields(), equalTo(config.getAnalyzedFields()));
    assertThat(createdConfig.getModelMemoryLimit(), equalTo(ByteSizeValue.parseBytesSizeValue("1gb", ""))); // default value
    assertThat(createdConfig.getDescription(), equalTo("this is a classification"));
}
public void testGetDataFrameAnalyticsConfig_SingleConfig() throws Exception {
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
String configId = "get-test-config";
@ -1603,19 +1640,19 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
public void testEvaluateDataFrame_BinarySoftClassification() throws IOException {
String indexName = "evaluate-test-index";
createIndex(indexName, mappingForClassification());
createIndex(indexName, mappingForSoftClassification());
BulkRequest bulk = new BulkRequest()
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(docForClassification(indexName, "blue", false, 0.1)) // #0
.add(docForClassification(indexName, "blue", false, 0.2)) // #1
.add(docForClassification(indexName, "blue", false, 0.3)) // #2
.add(docForClassification(indexName, "blue", false, 0.4)) // #3
.add(docForClassification(indexName, "blue", false, 0.7)) // #4
.add(docForClassification(indexName, "blue", true, 0.2)) // #5
.add(docForClassification(indexName, "green", true, 0.3)) // #6
.add(docForClassification(indexName, "green", true, 0.4)) // #7
.add(docForClassification(indexName, "green", true, 0.8)) // #8
.add(docForClassification(indexName, "green", true, 0.9)); // #9
.add(docForSoftClassification(indexName, "blue", false, 0.1)) // #0
.add(docForSoftClassification(indexName, "blue", false, 0.2)) // #1
.add(docForSoftClassification(indexName, "blue", false, 0.3)) // #2
.add(docForSoftClassification(indexName, "blue", false, 0.4)) // #3
.add(docForSoftClassification(indexName, "blue", false, 0.7)) // #4
.add(docForSoftClassification(indexName, "blue", true, 0.2)) // #5
.add(docForSoftClassification(indexName, "green", true, 0.3)) // #6
.add(docForSoftClassification(indexName, "green", true, 0.4)) // #7
.add(docForSoftClassification(indexName, "green", true, 0.8)) // #8
.add(docForSoftClassification(indexName, "green", true, 0.9)); // #9
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
@ -1677,19 +1714,19 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
public void testEvaluateDataFrame_BinarySoftClassification_WithQuery() throws IOException {
String indexName = "evaluate-with-query-test-index";
createIndex(indexName, mappingForClassification());
createIndex(indexName, mappingForSoftClassification());
BulkRequest bulk = new BulkRequest()
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(docForClassification(indexName, "blue", true, 1.0)) // #0
.add(docForClassification(indexName, "blue", true, 1.0)) // #1
.add(docForClassification(indexName, "blue", true, 1.0)) // #2
.add(docForClassification(indexName, "blue", true, 1.0)) // #3
.add(docForClassification(indexName, "blue", true, 0.0)) // #4
.add(docForClassification(indexName, "blue", true, 0.0)) // #5
.add(docForClassification(indexName, "green", true, 0.0)) // #6
.add(docForClassification(indexName, "green", true, 0.0)) // #7
.add(docForClassification(indexName, "green", true, 0.0)) // #8
.add(docForClassification(indexName, "green", true, 1.0)); // #9
.add(docForSoftClassification(indexName, "blue", true, 1.0)) // #0
.add(docForSoftClassification(indexName, "blue", true, 1.0)) // #1
.add(docForSoftClassification(indexName, "blue", true, 1.0)) // #2
.add(docForSoftClassification(indexName, "blue", true, 1.0)) // #3
.add(docForSoftClassification(indexName, "blue", true, 0.0)) // #4
.add(docForSoftClassification(indexName, "blue", true, 0.0)) // #5
.add(docForSoftClassification(indexName, "green", true, 0.0)) // #6
.add(docForSoftClassification(indexName, "green", true, 0.0)) // #7
.add(docForSoftClassification(indexName, "green", true, 0.0)) // #8
.add(docForSoftClassification(indexName, "green", true, 1.0)); // #9
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
@ -1752,6 +1789,85 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertThat(rSquaredResult.getValue(), closeTo(-5.1000000000000005, 1e-9));
}
/**
 * Evaluates a multiclass confusion matrix over a small fixed dataset of
 * actual/predicted class pairs, once with the metric's default size and once
 * with an explicit size of 2 (which folds the rarest classes into "_other_").
 */
public void testEvaluateDataFrame_Classification() throws IOException {
    String indexName = "evaluate-classification-test-index";
    createIndex(indexName, mappingForClassification());
    // dataset: cat predicted as cat x3 / dog / fish; dog as cat / dog x3; horse as cat
    BulkRequest regressionBulk = new BulkRequest()
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .add(docForClassification(indexName, "cat", "cat"))
        .add(docForClassification(indexName, "cat", "cat"))
        .add(docForClassification(indexName, "cat", "cat"))
        .add(docForClassification(indexName, "cat", "dog"))
        .add(docForClassification(indexName, "cat", "fish"))
        .add(docForClassification(indexName, "dog", "cat"))
        .add(docForClassification(indexName, "dog", "dog"))
        .add(docForClassification(indexName, "dog", "dog"))
        .add(docForClassification(indexName, "dog", "dog"))
        .add(docForClassification(indexName, "horse", "cat"));
    highLevelClient().bulk(regressionBulk, RequestOptions.DEFAULT);

    MachineLearningClient machineLearningClient = highLevelClient().machineLearning();

    { // No size provided for MulticlassConfusionMatrixMetric, default used instead
        EvaluateDataFrameRequest evaluateDataFrameRequest =
            new EvaluateDataFrameRequest(
                indexName,
                null,
                new Classification(actualClassField, predictedClassField, new MulticlassConfusionMatrixMetric()));

        EvaluateDataFrameResponse evaluateDataFrameResponse =
            execute(evaluateDataFrameRequest, machineLearningClient::evaluateDataFrame, machineLearningClient::evaluateDataFrameAsync);
        assertThat(evaluateDataFrameResponse.getEvaluationName(), equalTo(Classification.NAME));
        assertThat(evaluateDataFrameResponse.getMetrics().size(), equalTo(1));

        MulticlassConfusionMatrixMetric.Result mcmResult =
            evaluateDataFrameResponse.getMetricByName(MulticlassConfusionMatrixMetric.NAME);
        assertThat(mcmResult.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME));
        // all three actual classes appear as rows; "fish" was never an actual class
        // so it only shows up in cat's "_other_" bucket
        Map<String, Map<String, Long>> expectedConfusionMatrix = new HashMap<>();
        expectedConfusionMatrix.put("cat", new HashMap<>());
        expectedConfusionMatrix.get("cat").put("cat", 3L);
        expectedConfusionMatrix.get("cat").put("dog", 1L);
        expectedConfusionMatrix.get("cat").put("horse", 0L);
        expectedConfusionMatrix.get("cat").put("_other_", 1L);
        expectedConfusionMatrix.put("dog", new HashMap<>());
        expectedConfusionMatrix.get("dog").put("cat", 1L);
        expectedConfusionMatrix.get("dog").put("dog", 3L);
        expectedConfusionMatrix.get("dog").put("horse", 0L);
        expectedConfusionMatrix.put("horse", new HashMap<>());
        expectedConfusionMatrix.get("horse").put("cat", 1L);
        expectedConfusionMatrix.get("horse").put("dog", 0L);
        expectedConfusionMatrix.get("horse").put("horse", 0L);
        assertThat(mcmResult.getConfusionMatrix(), equalTo(expectedConfusionMatrix));
        assertThat(mcmResult.getOtherClassesCount(), equalTo(0L));
    }
    { // Explicit size provided for MulticlassConfusionMatrixMetric metric
        EvaluateDataFrameRequest evaluateDataFrameRequest =
            new EvaluateDataFrameRequest(
                indexName,
                null,
                new Classification(actualClassField, predictedClassField, new MulticlassConfusionMatrixMetric(2)));

        EvaluateDataFrameResponse evaluateDataFrameResponse =
            execute(evaluateDataFrameRequest, machineLearningClient::evaluateDataFrame, machineLearningClient::evaluateDataFrameAsync);
        assertThat(evaluateDataFrameResponse.getEvaluationName(), equalTo(Classification.NAME));
        assertThat(evaluateDataFrameResponse.getMetrics().size(), equalTo(1));

        MulticlassConfusionMatrixMetric.Result mcmResult =
            evaluateDataFrameResponse.getMetricByName(MulticlassConfusionMatrixMetric.NAME);
        assertThat(mcmResult.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME));
        // with size 2 only the two most frequent actual classes keep rows;
        // "horse" is counted in getOtherClassesCount() instead
        Map<String, Map<String, Long>> expectedConfusionMatrix = new HashMap<>();
        expectedConfusionMatrix.put("cat", new HashMap<>());
        expectedConfusionMatrix.get("cat").put("cat", 3L);
        expectedConfusionMatrix.get("cat").put("dog", 1L);
        expectedConfusionMatrix.get("cat").put("_other_", 1L);
        expectedConfusionMatrix.put("dog", new HashMap<>());
        expectedConfusionMatrix.get("dog").put("cat", 1L);
        expectedConfusionMatrix.get("dog").put("dog", 3L);
        assertThat(mcmResult.getConfusionMatrix(), equalTo(expectedConfusionMatrix));
        assertThat(mcmResult.getOtherClassesCount(), equalTo(1L));
    }
}
private static XContentBuilder defaultMappingForTest() throws IOException {
return XContentFactory.jsonBuilder().startObject()
.startObject("properties")
@ -1769,7 +1885,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
private static final String actualField = "label";
private static final String probabilityField = "p";
private static XContentBuilder mappingForClassification() throws IOException {
private static XContentBuilder mappingForSoftClassification() throws IOException {
return XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject(datasetField)
@ -1785,26 +1901,48 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
.endObject();
}
private static IndexRequest docForClassification(String indexName, String dataset, boolean isTrue, double p) {
private static IndexRequest docForSoftClassification(String indexName, String dataset, boolean isTrue, double p) {
return new IndexRequest()
.index(indexName)
.source(XContentType.JSON, datasetField, dataset, actualField, Boolean.toString(isTrue), probabilityField, p);
}
private static final String actualClassField = "actual_class";
private static final String predictedClassField = "predicted_class";
// Index mapping for multiclass classification evaluation docs: two keyword
// fields holding the actual and predicted class labels.
private static XContentBuilder mappingForClassification() throws IOException {
    return XContentFactory.jsonBuilder().startObject()
        .startObject("properties")
            .startObject(actualClassField)
                .field("type", "keyword")
            .endObject()
            .startObject(predictedClassField)
                .field("type", "keyword")
            .endObject()
        .endObject()
    .endObject();
}
// Builds one evaluation document pairing an actual class label with the
// label the model predicted for it.
private static IndexRequest docForClassification(String indexName, String actualClass, String predictedClass) {
    return new IndexRequest()
        .index(indexName)
        .source(XContentType.JSON, actualClassField, actualClass, predictedClassField, predictedClass);
}
private static final String actualRegression = "regression_actual";
private static final String probabilityRegression = "regression_prob";
private static XContentBuilder mappingForRegression() throws IOException {
return XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject(actualRegression)
.field("type", "double")
.startObject(actualRegression)
.field("type", "double")
.endObject()
.startObject(probabilityRegression)
.field("type", "double")
.endObject()
.endObject()
.startObject(probabilityRegression)
.field("type", "double")
.endObject()
.endObject()
.endObject();
.endObject();
}
private static IndexRequest docForRegression(String indexName, double act, double p) {
@ -1819,11 +1957,11 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
public void testEstimateMemoryUsage() throws IOException {
String indexName = "estimate-test-index";
createIndex(indexName, mappingForClassification());
createIndex(indexName, mappingForSoftClassification());
BulkRequest bulk1 = new BulkRequest()
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < 10; ++i) {
bulk1.add(docForClassification(indexName, randomAlphaOfLength(10), randomBoolean(), randomDoubleBetween(0.0, 1.0, true)));
bulk1.add(docForSoftClassification(indexName, randomAlphaOfLength(10), randomBoolean(), randomDoubleBetween(0.0, 1.0, true)));
}
highLevelClient().bulk(bulk1, RequestOptions.DEFAULT);
@ -1849,7 +1987,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
BulkRequest bulk2 = new BulkRequest()
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 10; i < 100; ++i) {
bulk2.add(docForClassification(indexName, randomAlphaOfLength(10), randomBoolean(), randomDoubleBetween(0.0, 1.0, true)));
bulk2.add(docForSoftClassification(indexName, randomAlphaOfLength(10), randomBoolean(), randomDoubleBetween(0.0, 1.0, true)));
}
highLevelClient().bulk(bulk2, RequestOptions.DEFAULT);

View File

@ -57,7 +57,9 @@ import org.elasticsearch.client.indexlifecycle.ShrinkAction;
import org.elasticsearch.client.indexlifecycle.UnfollowAction;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
@ -65,6 +67,9 @@ import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.Binar
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode;
import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum;
import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
@ -681,7 +686,7 @@ public class RestHighLevelClientTests extends ESTestCase {
public void testProvidedNamedXContents() {
List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getProvidedNamedXContents();
assertEquals(41, namedXContents.size());
assertEquals(48, namedXContents.size());
Map<Class<?>, Integer> categories = new HashMap<>();
List<String> names = new ArrayList<>();
for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
@ -691,7 +696,7 @@ public class RestHighLevelClientTests extends ESTestCase {
categories.put(namedXContent.categoryClass, counter + 1);
}
}
assertEquals("Had: " + categories, 11, categories.size());
assertEquals("Had: " + categories, 12, categories.size());
assertEquals(Integer.valueOf(3), categories.get(Aggregation.class));
assertTrue(names.contains(ChildrenAggregationBuilder.NAME));
assertTrue(names.contains(MatrixStatsAggregationBuilder.NAME));
@ -715,33 +720,39 @@ public class RestHighLevelClientTests extends ESTestCase {
assertTrue(names.contains(ShrinkAction.NAME));
assertTrue(names.contains(FreezeAction.NAME));
assertTrue(names.contains(SetPriorityAction.NAME));
assertEquals(Integer.valueOf(2), categories.get(DataFrameAnalysis.class));
assertEquals(Integer.valueOf(3), categories.get(DataFrameAnalysis.class));
assertTrue(names.contains(OutlierDetection.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Regression.NAME.getPreferredName()));
assertTrue(names.contains(org.elasticsearch.client.ml.dataframe.Classification.NAME.getPreferredName()));
assertEquals(Integer.valueOf(1), categories.get(SyncConfig.class));
assertTrue(names.contains(TimeSyncConfig.NAME));
assertEquals(Integer.valueOf(2), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.Evaluation.class));
assertThat(names, hasItems(BinarySoftClassification.NAME, Regression.NAME));
assertEquals(Integer.valueOf(6), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.class));
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.Evaluation.class));
assertThat(names, hasItems(BinarySoftClassification.NAME, Classification.NAME, Regression.NAME));
assertEquals(Integer.valueOf(7), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.class));
assertThat(names,
hasItems(AucRocMetric.NAME,
PrecisionMetric.NAME,
RecallMetric.NAME,
ConfusionMatrixMetric.NAME,
MulticlassConfusionMatrixMetric.NAME,
MeanSquaredErrorMetric.NAME,
RSquaredMetric.NAME));
assertEquals(Integer.valueOf(6), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.Result.class));
assertEquals(Integer.valueOf(7), categories.get(org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric.Result.class));
assertThat(names,
hasItems(AucRocMetric.NAME,
PrecisionMetric.NAME,
RecallMetric.NAME,
ConfusionMatrixMetric.NAME,
MulticlassConfusionMatrixMetric.NAME,
MeanSquaredErrorMetric.NAME,
RSquaredMetric.NAME));
assertEquals(Integer.valueOf(3), categories.get(org.elasticsearch.client.ml.inference.preprocessing.PreProcessor.class));
assertThat(names, hasItems(FrequencyEncoding.NAME, OneHotEncoding.NAME, TargetMeanEncoding.NAME));
assertEquals(Integer.valueOf(1), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class));
assertThat(names, hasItems(Tree.NAME));
assertEquals(Integer.valueOf(2), categories.get(org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel.class));
assertThat(names, hasItems(Tree.NAME, Ensemble.NAME));
assertEquals(Integer.valueOf(2),
categories.get(org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator.class));
assertThat(names, hasItems(WeightedMode.NAME, WeightedSum.NAME));
}
public void testApiNamingConventions() throws Exception {

View File

@ -57,6 +57,7 @@ import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.slm.DeleteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.slm.ExecuteSnapshotLifecycleRetentionRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecyclePolicyRequest;
import org.elasticsearch.client.slm.GetSnapshotLifecyclePolicyResponse;
import org.elasticsearch.client.slm.GetSnapshotLifecycleStatsRequest;
@ -987,6 +988,44 @@ public class ILMDocumentationIT extends ESRestHighLevelClientTestCase {
// end::slm-delete-snapshot-lifecycle-policy-execute-async
assertTrue(deleteResp.isAcknowledged());
//////// EXECUTE RETENTION
// tag::slm-execute-snapshot-lifecycle-retention
ExecuteSnapshotLifecycleRetentionRequest req =
new ExecuteSnapshotLifecycleRetentionRequest();
// end::slm-execute-snapshot-lifecycle-retention
// tag::slm-execute-snapshot-lifecycle-retention-execute
AcknowledgedResponse retentionResp =
client.indexLifecycle()
.executeSnapshotLifecycleRetention(req,
RequestOptions.DEFAULT);
// end::slm-execute-snapshot-lifecycle-retention-execute
// tag::slm-execute-snapshot-lifecycle-retention-response
final boolean acked = retentionResp.isAcknowledged();
// end::slm-execute-snapshot-lifecycle-retention-response
// tag::slm-execute-snapshot-lifecycle-policy-execute-listener
ActionListener<AcknowledgedResponse> retentionListener =
new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse r) {
assert r.isAcknowledged(); // <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::slm-execute-snapshot-lifecycle-retention-execute-listener
// tag::slm-execute-snapshot-lifecycle-retention-execute-async
client.indexLifecycle()
.executeSnapshotLifecycleRetentionAsync(req,
RequestOptions.DEFAULT, retentionListener);
// end::slm-execute-snapshot-lifecycle-retention-execute-async
}
private void assertSnapshotExists(final RestHighLevelClient client, final String repo, final String snapshotName) throws Exception {

View File

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for the {@link Classification} data frame analysis
 * configuration of the high level REST client.
 */
public class ClassificationTests extends AbstractXContentTestCase<Classification> {

    /**
     * Builds a {@link Classification} config in which every optional hyperparameter
     * is independently either absent ({@code null}) or a random in-range value.
     */
    public static Classification randomClassification() {
        String dependentVariable = randomAlphaOfLength(10);
        Double lambda = randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true);
        Double gamma = randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true);
        Double eta = randomBoolean() ? null : randomDoubleBetween(0.001, 1.0, true);
        Integer maximumNumberTrees = randomBoolean() ? null : randomIntBetween(1, 2000);
        Double featureBagFraction = randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, false);
        String predictionFieldName = randomBoolean() ? null : randomAlphaOfLength(10);
        Double trainingPercent = randomBoolean() ? null : randomDoubleBetween(1.0, 100.0, true);
        return Classification.builder(dependentVariable)
            .setLambda(lambda)
            .setGamma(gamma)
            .setEta(eta)
            .setMaximumNumberTrees(maximumNumberTrees)
            .setFeatureBagFraction(featureBagFraction)
            .setPredictionFieldName(predictionFieldName)
            .setTrainingPercent(trainingPercent)
            .build();
    }

    @Override
    protected Classification createTestInstance() {
        return randomClassification();
    }

    @Override
    protected Classification doParseInstance(XContentParser parser) throws IOException {
        return Classification.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        // the client-side parser must tolerate fields introduced by newer servers
        return true;
    }
}

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.classification;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.function.Predicate;
/**
 * XContent round-trip tests for the {@link Classification} evaluation definition.
 */
public class ClassificationTests extends AbstractXContentTestCase<Classification> {

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // evaluation metrics are named XContent objects, so their parsers must be registered
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }

    /**
     * Builds a random {@link Classification} evaluation; the metrics list is either
     * absent ({@code null}) or contains the multiclass confusion matrix metric.
     */
    public static Classification createRandom() {
        String actualField = randomAlphaOfLength(10);
        String predictedField = randomAlphaOfLength(10);
        return new Classification(
            actualField,
            predictedField,
            randomBoolean() ? null : Arrays.asList(new MulticlassConfusionMatrixMetric()));
    }

    @Override
    protected Classification createTestInstance() {
        return createRandom();
    }

    @Override
    protected Classification doParseInstance(XContentParser parser) throws IOException {
        return Classification.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // tolerate random unknown fields only at the root of the object,
        // not inside the nested metric objects
        return field -> field.isEmpty() == false;
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.classification;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * XContent round-trip tests for {@link MulticlassConfusionMatrixMetric.Result}.
 */
public class MulticlassConfusionMatrixMetricResultTests extends AbstractXContentTestCase<MulticlassConfusionMatrixMetric.Result> {

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }

    /**
     * Builds a random confusion matrix: for each actual class (row) a sparse map
     * from predicted class to count, plus a random "other classes" count.
     */
    @Override
    protected MulticlassConfusionMatrixMetric.Result createTestInstance() {
        int numClasses = randomIntBetween(2, 100);
        // random 10-char class names; collisions are effectively impossible at this length
        List<String> classNames = Stream.generate(() -> randomAlphaOfLength(10)).limit(numClasses).collect(Collectors.toList());
        Map<String, Map<String, Long>> confusionMatrix = new TreeMap<>();
        for (int i = 0; i < numClasses; i++) {
            Map<String, Long> row = new TreeMap<>();
            confusionMatrix.put(classNames.get(i), row);
            for (int j = 0; j < numClasses; j++) {
                if (randomBoolean()) {
                    // key each cell by the predicted class (j), not the actual class (i);
                    // the previous code used get(i), collapsing every row to a single
                    // self-keyed entry and leaving the loop variable j unused
                    row.put(classNames.get(j), randomNonNegativeLong());
                }
            }
        }
        long otherClassesCount = randomNonNegativeLong();
        return new MulticlassConfusionMatrixMetric.Result(confusionMatrix, otherClassesCount);
    }

    @Override
    protected MulticlassConfusionMatrixMetric.Result doParseInstance(XContentParser parser) throws IOException {
        return MulticlassConfusionMatrixMetric.Result.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }

    @Override
    protected Predicate<String> getRandomFieldsExcludeFilter() {
        // allow unknown fields in the root of the object only
        return field -> !field.isEmpty();
    }
}

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.dataframe.evaluation.classification;
import org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
/**
 * XContent round-trip tests for the {@link MulticlassConfusionMatrixMetric} definition.
 */
public class MulticlassConfusionMatrixMetricTests extends AbstractXContentTestCase<MulticlassConfusionMatrixMetric> {

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return new NamedXContentRegistry(new MlEvaluationNamedXContentProvider().getNamedXContentParsers());
    }

    @Override
    protected MulticlassConfusionMatrixMetric createTestInstance() {
        // the size parameter is optional, so leave it unset about half the time
        Integer size = null;
        if (randomBoolean()) {
            size = randomIntBetween(1, 1000);
        }
        return new MulticlassConfusionMatrixMetric(size);
    }

    @Override
    protected MulticlassConfusionMatrixMetric doParseInstance(XContentParser parser) throws IOException {
        return MulticlassConfusionMatrixMetric.fromXContent(parser);
    }

    @Override
    protected boolean supportsUnknownFields() {
        return true;
    }
}

View File

@ -0,0 +1,112 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.ml.inference;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
/**
 * Tests for {@code NamedXContentObjectHelper.writeNamedObjects}, verifying the two
 * output shapes: objects wrapped in a JSON array versus written directly into a
 * single JSON object.
 */
public class NamedXContentObjectHelperTests extends ESTestCase {

    /**
     * Minimal {@link NamedXContentObject} fixture: a named object with a single
     * optional string field {@code my_field}.
     */
    static class NamedTestObject implements NamedXContentObject {

        private String fieldValue;

        // lenient parser (second ctor arg true => unknown fields are ignored)
        public static final ObjectParser<NamedTestObject, Void> PARSER =
            new ObjectParser<>("my_named_object", true, NamedTestObject::new);
        static {
            PARSER.declareString(NamedTestObject::setFieldValue, new ParseField("my_field"));
        }

        NamedTestObject() {
        }

        NamedTestObject(String value) {
            this.fieldValue = value;
        }

        @Override
        public String getName() {
            return "my_named_object";
        }

        public void setFieldValue(String value) {
            this.fieldValue = value;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            // field is optional: omitted entirely when null
            if (fieldValue != null) {
                builder.field("my_field", fieldValue);
            }
            builder.endObject();
            return builder;
        }
    }

    /**
     * With the boolean argument set to {@code true} the named objects are wrapped
     * in a JSON array under the field name, preserving their list order.
     */
    public void testSerializeInOrder() throws IOException {
        String expected =
            "{\"my_objects\":[{\"my_named_object\":{\"my_field\":\"value1\"}},{\"my_named_object\":{\"my_field\":\"value2\"}}]}";
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.startObject();
            List<NamedXContentObject> objects = Arrays.asList(new NamedTestObject("value1"), new NamedTestObject("value2"));
            NamedXContentObjectHelper.writeNamedObjects(builder, ToXContent.EMPTY_PARAMS, true, "my_objects", objects);
            builder.endObject();
            assertThat(BytesReference.bytes(builder).utf8ToString(), equalTo(expected));
        }
    }

    /**
     * With the boolean argument set to {@code false} the named objects are written
     * straight into one JSON object — note the expected output contains duplicate
     * "my_named_object" keys, which is intentional for this serialization shape.
     */
    public void testSerialize() throws IOException {
        String expected = "{\"my_objects\":{\"my_named_object\":{\"my_field\":\"value1\"},\"my_named_object\":{\"my_field\":\"value2\"}}}";
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.startObject();
            List<NamedXContentObject> objects = Arrays.asList(new NamedTestObject("value1"), new NamedTestObject("value2"));
            NamedXContentObjectHelper.writeNamedObjects(builder, ToXContent.EMPTY_PARAMS, false, "my_objects", objects);
            builder.endObject();
            assertThat(BytesReference.bytes(builder).utf8ToString(), equalTo(expected));
        }
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // register the fixture's parser plus the standard search-module named XContents
        List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
        namedXContent.addAll(Collections.singletonList(new NamedXContentRegistry.Entry(NamedXContentObject.class,
            new ParseField("my_named_object"),
            (p, c) -> NamedTestObject.PARSER.apply(p, null))));
        namedXContent.addAll(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
        return new NamedXContentRegistry(namedXContent);
    }
}

Some files were not shown because too many files have changed in this diff Show More