Fixup build after opening x-pack

parent cf607e79d7
commit cb56bf49d5
@@ -72,6 +72,7 @@ addSubProjects('', new File(rootProject.projectDir, 'libs'))
addSubProjects('', new File(rootProject.projectDir, 'modules'))
addSubProjects('', new File(rootProject.projectDir, 'plugins'))
addSubProjects('', new File(rootProject.projectDir, 'qa'))
+addSubProjects('', new File(rootProject.projectDir, 'x-pack'))

boolean isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
if (isEclipse) {
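The added line pulls the new x-pack tree into the build the same way the existing module, plugin and qa trees are picked up. As a rough sketch of how a recursive include helper of this kind usually works in a settings.gradle (the names and layout here are assumptions for illustration, not the exact addSubProjects implementation):

// Simplified sketch of a recursive include helper for a settings script.
void addSubProjects(String path, File dir) {
    if (dir.isDirectory() == false) {
        return
    }
    String projectName = "${path}:${dir.name}"
    if (new File(dir, 'build.gradle').exists()) {
        // register the directory as a Gradle project
        include projectName
        project(projectName).projectDir = dir
    }
    // recurse so nested builds (for example x-pack/plugin/core) are found too
    dir.listFiles().each { File child ->
        if (child.isDirectory()) {
            addSubProjects(projectName, child)
        }
    }
}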
@@ -1,8 +0,0 @@
# This file is used with all of the non-matrix tests in Jenkins.

# This .properties file defines the versions of Java with which to
# build and test Elasticsearch for this branch. Valid Java versions
# are 'java' or 'openjdk' followed by the major release number.

ES_BUILD_JAVA=java10
ES_RUNTIME_JAVA=java8
@@ -1,2 +0,0 @@
ES_BUILD_JAVA:
  - java10
@@ -1 +0,0 @@
exclude:
@@ -1,3 +0,0 @@
ES_RUNTIME_JAVA:
  - java8
  - java10
@@ -1,88 +0,0 @@
((java-mode
.
((eval
.
(progn
(defun my/point-in-defun-declaration-p ()
(let ((bod (save-excursion (c-beginning-of-defun)
(point))))
(<= bod
(point)
(save-excursion (goto-char bod)
(re-search-forward "{")
(point)))))

(defun my/is-string-concatenation-p ()
"Returns true if the previous line is a string concatenation"
(save-excursion
(let ((start (point)))
(forward-line -1)
(if (re-search-forward " \\\+$" start t) t nil))))

(defun my/inside-java-lambda-p ()
"Returns true if point is the first statement inside of a lambda"
(save-excursion
(c-beginning-of-statement-1)
(let ((start (point)))
(forward-line -1)
(if (search-forward " -> {" start t) t nil))))

(defun my/trailing-paren-p ()
"Returns true if point is a trailing paren and semicolon"
(save-excursion
(end-of-line)
(let ((endpoint (point)))
(beginning-of-line)
(if (re-search-forward "[ ]*);$" endpoint t) t nil))))

(defun my/prev-line-call-with-no-args-p ()
"Return true if the previous line is a function call with no arguments"
(save-excursion
(let ((start (point)))
(forward-line -1)
(if (re-search-forward ".($" start t) t nil))))

(defun my/arglist-cont-nonempty-indentation (arg)
(if (my/inside-java-lambda-p)
'+
(if (my/is-string-concatenation-p)
16
(unless (my/point-in-defun-declaration-p) '++))))

(defun my/statement-block-intro (arg)
(if (and (c-at-statement-start-p) (my/inside-java-lambda-p)) 0 '+))

(defun my/block-close (arg)
(if (my/inside-java-lambda-p) '- 0))

(defun my/arglist-close (arg) (if (my/trailing-paren-p) 0 '--))

(defun my/arglist-intro (arg)
(if (my/prev-line-call-with-no-args-p) '++ 0))

(c-set-offset 'inline-open 0)
(c-set-offset 'topmost-intro-cont '+)
(c-set-offset 'statement-block-intro 'my/statement-block-intro)
(c-set-offset 'block-close 'my/block-close)
(c-set-offset 'knr-argdecl-intro '+)
(c-set-offset 'substatement-open '+)
(c-set-offset 'substatement-label '+)
(c-set-offset 'case-label '+)
(c-set-offset 'label '+)
(c-set-offset 'statement-case-open '+)
(c-set-offset 'statement-cont '++)
(c-set-offset 'arglist-intro 'my/arglist-intro)
(c-set-offset 'arglist-cont-nonempty '(my/arglist-cont-nonempty-indentation c-lineup-arglist))
(c-set-offset 'arglist-close 'my/arglist-close)
(c-set-offset 'inexpr-class 0)
(c-set-offset 'access-label 0)
(c-set-offset 'inher-intro '++)
(c-set-offset 'inher-cont '++)
(c-set-offset 'brace-list-intro '+)
(c-set-offset 'func-decl-cont '++)
))
(c-basic-offset . 4)
(c-comment-only-line-offset . (0 . 0))
(fill-column . 140)
(fci-rule-column . 140)
(compile-command . "gradle compileTestJava"))))
@@ -1,6 +0,0 @@
<!--
Please do not submit any issues related to security vulnerabilities that
could be exploited by an attacker. Instead, send an email to
security@elastic.co. If you have any doubts, send an email to
security@elastic.co.
-->
@@ -1,54 +0,0 @@
.idea/
.gradle/
*.iml
*.ipr
*.iws
work/
/data/
logs/
.DS_Store
build/
build-idea/
build-eclipse/
generated-resources/
target/
*-execution-hints.log
docs/html/
docs/build.log
npm-debug.log
/tmp/
backwards/
html_docs
.vagrant/
vendor/
.bundle
Gemfile.lock

## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects)
## All files (.project, .classpath, .settings/*) should be generated through Maven which
## will correctly set the classpath based on the declared dependencies and write settings
## files to ensure common coding style across Eclipse and IDEA.
.project
.classpath
eclipse-build
*/.project
*/.classpath
*/eclipse-build
.settings
!/.settings/org.eclipse.core.resources.prefs
!/.settings/org.eclipse.jdt.core.prefs
!/.settings/org.eclipse.jdt.ui.prefs

## netbeans ignores
nb-configuration.xml
nbactions.xml

dependency-reduced-pom.xml
github.token

## ignore attachment files
.local-*
*/.local-*

## ignore antlr temporary files used by vscode-antlr4
.antlr
@@ -1,8 +0,0 @@
-/target
-/license/target
-/marvel/target
-/qa/target
-/shield/target
-/watcher/target
-/x-dev-tools/target
-*.class
@@ -3,12 +3,6 @@ import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.precommit.LicenseHeadersTask

-if (project.projectDir.name != 'x-pack-elasticsearch') {
-  throw new GradleException('You must checkout x-pack-elasticsearch in the following directory: <path to Elasticsearch checkout>/../elasticsearch-extra/x-pack-elasticsearch')
-}
-
-task wrapper(type: Wrapper)
-
Project xpackRootProject = project

subprojects {
@@ -78,9 +72,7 @@ subprojects {
bwcVersions.snapshotProjectNames.each { snapshotName ->
Version snapshot = bwcVersions.getSnapshotForProject(snapshotName)
if (snapshot != null && snapshot.onOrAfter("6.3.0")) {
-String snapshotProject = ":x-pack-elasticsearch:plugin:bwc:${snapshotName}"
+String snapshotProject = ":x-pack:plugin:bwc:${snapshotName}"
project(snapshotProject).ext.bwcVersion = snapshot
ext.projectSubstitutions["org.elasticsearch.plugin:x-pack:${snapshot}"] = snapshotProject
}
}
}
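The ext.projectSubstitutions map pairs published module coordinates with local project paths. Elsewhere in the build such a map is typically applied through Gradle's dependency substitution API, roughly like the sketch below (illustrative only; the property name and the place it is consumed are assumptions):

// Illustrative: swap published "org.elasticsearch.plugin:x-pack:<version>"
// artifacts for the local bwc project builds recorded in projectSubstitutions.
configurations.all {
    resolutionStrategy.dependencySubstitution {
        projectSubstitutions.each { coordinate, projectPath ->
            substitute module(coordinate) with project(projectPath)
        }
    }
}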
@@ -1,10 +0,0 @@
File extrasDir = new File(settingsDir, '../..').getCanonicalFile()
if (extrasDir.name.endsWith('-extra') == false) {
  throw new GradleException("x-pack-elasticsearch must be checked out under an elasticsearch-extra directory, found ${extrasDir.name}")
}
File elasticsearchDir = new File(extrasDir.parentFile, extrasDir.name[0..-7])
if (elasticsearchDir.exists() == false) {
  throw new GradleException("${elasticsearchDir.name} is missing as a sibling to ${extrasDir.name}")
}

project(':').projectDir = new File(elasticsearchDir, 'buildSrc')
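The [0..-7] range strips the trailing '-extra' from the directory name; Groovy ranges accept negative indices counted from the end of the string, for example:

// 'elasticsearch-extra' has 19 characters, so [0..-7] keeps indices 0..12
assert 'elasticsearch-extra'[0..-7] == 'elasticsearch'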
@@ -1,195 +0,0 @@
#!/bin/bash
# This script is used as a single command to run the x-pack tests.
#
# It will attempt to check out 'elasticsearch' into a sibling directory
# unless the environment variable `USE_EXISTING_ES` has a value. The
# branch of elasticsearch which will be checked out depends on
# environment variables. If running locally, set GIT_BRANCH. When
# running in Jenkins, that env var is set. When running a PR
# jenkins job, the variables PR_SOURCE_BRANCH and PR_TARGET_BRANCH
# will be set and the source branch will be looked for in elasticsearch
# before falling back to the target branch name.
#
# It will also attempt to install the appropriate version of node.js
# for the Kibana plugin tests using nvm, unless
# `xpack.kibana.build=false` is defined in
# ~/.gradle/gradle.properties. Set a custom nvm directory using the
# `NVM_DIR` environment variable.
#

# Turn on semi-strict mode
set -e
set -o pipefail

# Allow the user to choose a different test through a single cli arg
# default to `check` if no argument has been supplied
key=${1-check}
case $key in
intake)
GRADLE_CLI_ARGS=(
"--info"
"compileJava"
"compileTestJava"
"precommit"
"check"
"-Dtests.network=true"
"-Dtests.badapples=true"
)
;;
packagingTest)
GRADLE_CLI_ARGS=(
"--info"
"-Pvagrant.boxes=all"
"packagingTest"
)
;;
packagingTestSample)
GRADLE_CLI_ARGS=(
"--info"
"-Pvagrant.boxes=sample"
"packagingTest"
)
;;
bwcTest)
GRADLE_CLI_ARGS=(
"--info"
"bwcTest"
)
;;
check)
GRADLE_CLI_ARGS=(
"--info"
"check"
"-Dtests.network=true"
"-Dtests.badapples=true"
)
;;
releaseTest)
GRADLE_CLI_ARGS=(
"--info"
"check"
"-Dtests.network=true"
"-Dtests.badapples=true"
"-Dbuild.snapshot=false"
"-Dlicense.key=/etc/x-pack/license.key"
"-Dtests.jvm.argline=-Dbuild.snapshot=false"
)
;;
*)
echo "Unsupported cli argument $1. Allowed arguments are packagingTest or check. No argument defaults to check."
exit 1;;
esac

SCRIPT="$0"

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
ls=$(ls -ld "$SCRIPT")
# Drop everything prior to ->
link=$(expr "$ls" : '.*-> \(.*\)$')
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=$(dirname "$SCRIPT")/"$link"
fi
done

# determine base directory
BASE_DIR=$(dirname "$SCRIPT")/..

# make BASE_DIR absolute
BASE_DIR=$(cd "$BASE_DIR"; pwd)

PARENT_DIR=$(cd "$BASE_DIR"/../..; pwd)

# go to the parent directory
cd $PARENT_DIR

if [ -z ${USE_EXISTING_ES:+x} ]; then
if [ -d "./elasticsearch" ]; then
echo "I expected a clean workspace but an 'elasticsearch' sibling directory already exists in [$PARENT_DIR]!"
echo
echo "Either define 'USE_EXISTING_ES' or remove the existing 'elasticsearch' sibling."
exit 1
fi

function pick_clone_target {
echo "picking which branch of elasticsearch to clone"

# PR_* are provided by the CI git plugin for pull requests
if [[ -n "$PR_AUTHOR" && -n "$PR_SOURCE_BRANCH" ]]; then
GH_USER="$PR_AUTHOR"
BRANCH="$PR_SOURCE_BRANCH"
echo " -> using pull request author $GH_USER and branch $BRANCH"
if [[ -n "$(git ls-remote --heads https://github.com/$GH_USER/elasticsearch.git $BRANCH 2>/dev/null)" ]]; then
return
fi
fi
GH_USER="elastic"
# GIT_BRANCH is provided by normal CI runs. It starts with the repo, i.e., origin/master
# If we are not in CI, we fall back to the master branch
BRANCH="${PR_TARGET_BRANCH:-${GIT_BRANCH#*/}}"
BRANCH="${BRANCH:-master}"
echo " -> using CI branch $BRANCH from elastic repo"
}

pick_clone_target

DEPTH=1
if [ -n "$BUILD_METADATA" ]; then
IFS=';' read -ra metadata <<< "$BUILD_METADATA"
for kv in "${metadata[@]}"; do
IFS='=' read -ra key_value <<< "$kv"
if [ "${key_value[0]}" == "git_ref_elasticsearch" ]; then
# Force checked out hash if build metadata is set. We use a depth of 100, which
# assumes there are no more than 100 commits between head of the branch and
# last-good-commit. This is still quite a bit faster than pulling the entire history.
ES_REF="${key_value[1]}"
DEPTH=100
fi
done
fi

echo " -> checking out '$BRANCH' branch from $GH_USER/elasticsearch..."
git clone -b $BRANCH "https://github.com/$GH_USER/elasticsearch.git" --depth=$DEPTH

if [ ! -z $ES_REF ]; then
echo " -> using elasticsearch ref from build metadata: $ES_REF"
git -C elasticsearch checkout $ES_REF
else
ES_REF="$(git -C elasticsearch rev-parse HEAD)"
fi

echo " -> checked out elasticsearch revision: $ES_REF"
echo

else
if [ -d "./elasticsearch" ]; then
echo "Using existing 'elasticsearch' checkout"
else
echo "You have defined 'USE_EXISTING_ES' but no existing Elasticsearch directory exists!"
exit 2
fi
fi

# back to base directory
cd "$BASE_DIR"

echo "Running x-pack-elasticsearch tests..."
echo "Running in $PWD"

# output the commands
set -xuf

# clean
./gradlew --stacktrace clean -Dorg.gradle.java.home=${RUNTIME_JAVA_HOME:-$JAVA_HOME}

# Actually run the tests
GRADLE_CLI_ARGS+=("-Dorg.gradle.java.home=${RUNTIME_JAVA_HOME:-$JAVA_HOME}")
./gradlew "${GRADLE_CLI_ARGS[@]}"

# write the ES hash we checked out to build metadata
mkdir build
echo "git_ref_elasticsearch=$ES_REF" > build/build_metadata

# ~*~ shell-script-mode ~*~
@@ -1,184 +0,0 @@
#!/usr/bin/env perl

use strict;
use warnings;

use HTTP::Tiny;
use IO::Socket::SSL 1.52;
use utf8;
use Getopt::Long;

my $Base_URL = "https://api.github.com/repos/";
my $User_Repo = 'elastic/x-pack-elasticsearch/';
my $Issue_URL = "https://github.com/${User_Repo}issues";
use JSON();
use URI();
use URI::Escape qw(uri_escape_utf8);

our $json = JSON->new->utf8(1);
our $http = HTTP::Tiny->new(
default_headers => {
Accept => "application/vnd.github.v3+json",
Authorization => load_github_key()
}
);

my %Opts = ( state => 'open' );

GetOptions(
\%Opts, #
'state=s', 'labels=s', 'add=s', 'remove=s'
) || exit usage();

die usage('--state must be one of open|all|closed')
unless $Opts{state} =~ /^(open|all|closed)$/;

die usage('--labels is required') unless $Opts{labels};
die usage('Either --add or --remove is required')
unless $Opts{add} || $Opts{remove};

relabel();

#===================================
sub relabel {
#===================================
my @remove = split /,/, ( $Opts{remove} || '' );
my @add = split /,/, ( $Opts{add} || '' );
my $add_json = $json->encode( \@add );
my $url = URI->new( $Base_URL . $User_Repo . 'issues' );
$url->query_form(
state => $Opts{state},
labels => $Opts{labels},
per_page => 100
);

my $spool = Spool->new($url);
while ( my $issue = $spool->next ) {
my $id = $issue->{number};
print "$Issue_URL/$id\n";
if (@add) {
add_label( $id, $add_json );
}
for (@remove) {
remove_label( $id, $_ );
}
}
print "Done\n";
}

#===================================
sub add_label {
#===================================
my ( $id, $json ) = @_;
my $response = $http->post(
$Base_URL . $User_Repo . "issues/$id/labels",
{ content => $json,
headers => { "Content-Type" => "application/json; charset=utf-8" }
}
);

die "$response->{status} $response->{reason}\n"
unless $response->{success};

}

#===================================
sub remove_label {
#===================================
my ( $id, $name ) = @_;
my $url
= $Base_URL
. $User_Repo
. "issues/$id/labels/"
. uri_escape_utf8($name);
my $response = $http->delete($url);

die "$response->{status} $response->{reason}\n"
unless $response->{success};

}

#===================================
sub load_github_key {
#===================================
my ($file) = glob("~/.github_auth");
unless ( -e $file ) {
warn "File ~/.github_auth doesn't exist - using anonymous API. "
. "Generate a Personal Access Token at https://github.com/settings/applications\n";
return '';
}
open my $fh, $file or die "Couldn't open $file: $!";
my ($key) = <$fh> || die "Couldn't read $file: $!";
$key =~ s/^\s+//;
$key =~ s/\s+$//;
die "Invalid GitHub key: $key"
unless $key =~ /^[0-9a-f]{40}$/;
return "token $key";

}

#===================================
sub usage {
#===================================
my $msg = shift || '';

if ($msg) {
$msg = "\nERROR: $msg\n\n";
}
return $msg . <<"USAGE";
$0 --state=open|closed|all --labels=foo,bar --add=new1,new2 --remove=old1,old2

USAGE

}

package Spool;

use strict;
use warnings;

#===================================
sub new {
#===================================
my $class = shift;
my $url = shift;
return bless {
url => $url,
buffer => []
},
$class;
}

#===================================
sub next {
#===================================
my $self = shift;
if ( @{ $self->{buffer} } == 0 ) {
$self->refill;
}
return shift @{ $self->{buffer} };
}

#===================================
sub refill {
#===================================
my $self = shift;
return unless $self->{url};
my $response = $http->get( $self->{url} );
die "$response->{status} $response->{reason}\n"
unless $response->{success};

$self->{url} = '';

if ( my $link = $response->{headers}{link} ) {
my @links = ref $link eq 'ARRAY' ? @$link : $link;
for ($link) {
next unless $link =~ /<([^>]+)>; rel="next"/;
$self->{url} = $1;
last;
}
}

push @{ $self->{buffer} }, @{ $json->decode( $response->{content} ) };

}
@@ -1,253 +0,0 @@
#!/usr/bin/env perl
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

use strict;
use warnings;

use HTTP::Tiny 0.070;
use IO::Socket::SSL 1.52;
use utf8;

my $Github_Key = load_github_key();
my $Base_URL = "https://${Github_Key}api.github.com/repos/";
my $User_Repo = 'elastic/x-pack-elasticsearch/';
my $Issue_URL = "http://github.com/${User_Repo}issues/";

my @Groups = (
"breaking", "breaking-java", "deprecation", "feature",
"enhancement", "bug", "regression", "upgrade", "non-issue", "build",
"docs", "test"
);
my %Group_Labels = (
breaking => 'Breaking changes',
'breaking-java' => 'Breaking Java changes',
build => 'Build',
deprecation => 'Deprecations',
docs => 'Docs',
feature => 'New features',
enhancement => 'Enhancements',
bug => 'Bug fixes',
regression => 'Regressions',
test => 'Tests',
upgrade => 'Upgrades',
"non-issue" => 'Non-issue',
other => 'NOT CLASSIFIED',
);

use JSON();
use Encode qw(encode_utf8);

my $json = JSON->new->utf8(1);

my %All_Labels = fetch_labels();

my $version = shift @ARGV
or dump_labels();

dump_labels("Unknown version '$version'")
unless $All_Labels{$version};

my $issues = fetch_issues($version);
dump_issues( $version, $issues );

#===================================
sub dump_issues {
#===================================
my $version = shift;
my $issues = shift;

$version =~ s/v//;
my ( $day, $month, $year ) = (gmtime)[ 3 .. 5 ];
$month++;
$year += 1900;

print <<"ASCIIDOC";
:issue: https://github.com/${User_Repo}issues/
:pull: https://github.com/${User_Repo}pull/

[[release-notes-$version]]
== $version Release Notes

ASCIIDOC

for my $group ( @Groups, 'other' ) {
my $group_issues = $issues->{$group} or next;
print "[[$group-$version]]\n"
. "[float]\n"
. "=== $Group_Labels{$group}\n\n";

for my $header ( sort keys %$group_issues ) {
my $header_issues = $group_issues->{$header};
print( $header || 'HEADER MISSING', "::\n" );

for my $issue (@$header_issues) {
my $title = $issue->{title};

if ( $issue->{state} eq 'open' ) {
$title .= " [OPEN]";
}
unless ( $issue->{pull_request} ) {
$title .= " [ISSUE]";
}
my $number = $issue->{number};

# print encode_utf8("* $title {pull}${number}[#${number}]");
print encode_utf8("* $title");
print "\n";
print encode_utf8("// https://github.com/${User_Repo}pull/${number}[#${number}]");
if ( my $related = $issue->{related_issues} ) {
my %uniq = map { $_ => 1 } @$related;
print keys %uniq > 1
? " (issues: "
: " (issue: ";
# print join ", ", map {"{issue}${_}[#${_}]"}
# print join ", ", map {"#${_}"}
print join ", ", map {"https://github.com/${User_Repo}issues/${_}[#${_}]"}
sort keys %uniq;
print ")";
}
print "\n";
}
print "\n";
}
print "\n\n";
}
}

#===================================
sub fetch_issues {
#===================================
my $version = shift;
my @issues;
my %seen;
for my $state ( 'open', 'closed' ) {
my $page = 1;
while (1) {
my $tranche
= fetch( $User_Repo
. 'issues?labels='
. $version
. '&pagesize=100&state='
. $state
. '&page='
. $page )
or die "Couldn't fetch issues for version '$version'";
push @issues, @$tranche;

for my $issue (@$tranche) {
next unless $issue->{pull_request};
for ( $issue->{body} =~ m{(?:#|${User_Repo}issues/)(\d+)}g ) {
$seen{$_}++;
push @{ $issue->{related_issues} }, $_;
}
}
$page++;
last unless @$tranche;
}
}

my %group;
ISSUE:
for my $issue (@issues) {
next if $seen{ $issue->{number} } && !$issue->{pull_request};

# uncomment for including/excluding PRs already issued in other versions
# next if grep {$_->{name}=~/^v2/} @{$issue->{labels}};
my %labels = map { $_->{name} => 1 } @{ $issue->{labels} };
my ($header) = map { substr( $_, 1 ) } grep {/^:/} sort keys %labels;
$header ||= 'NOT CLASSIFIED';
for (@Groups) {
if ( $labels{$_} ) {
push @{ $group{$_}{$header} }, $issue;
next ISSUE;
}
}
push @{ $group{other}{$header} }, $issue;
}

return \%group;
}

#===================================
sub fetch_labels {
#===================================
my %all;
my $page = 1;
while (1) {
my $labels = fetch( $User_Repo . 'labels?page=' . $page++ )
or die "Couldn't retrieve version labels";
last unless @$labels;
for (@$labels) {
my $name = $_->{name};
next unless $name =~ /^v/;
$all{$name} = 1;
}
}
return %all;
}

#===================================
sub fetch {
#===================================
my $url = $Base_URL . shift();
# print "$url\n";
my $response = HTTP::Tiny->new->get($url);
# use Data::Dumper;
# print Dumper($response);
die "$response->{status} $response->{reason}\n"
unless $response->{success};
# print $response->{content};
return $json->decode( $response->{content} );
}

#===================================
sub load_github_key {
#===================================

my ($file) = glob("~/.github_auth");
unless ( -e $file ) {
warn "File ~/.github_auth doesn't exist - using anonymous API. "
. "Generate a personal access token that has repo scope. See https://github.com/elastic/dev/blob/master/shared/development_process.md \n";
return '';
}
open my $fh, $file or die "Couldn't open $file: $!";
my ($key) = <$fh> || die "Couldn't read $file: $!";
$key =~ s/^\s+//;
$key =~ s/\s+$//;
die "Invalid GitHub key: $key"
unless $key =~ /^[0-9a-f]{40}$/;
return "$key:x-oauth-basic@";

}

#===================================
sub dump_labels {
#===================================
my $error = shift || '';
if ($error) {
$error = "\nERROR: $error\n";
}
my $labels = join( "\n - ", '', ( sort keys %All_Labels ) );
die <<USAGE
$error
USAGE: $0 version > outfile

Known versions:$labels

USAGE

}
@@ -1,270 +0,0 @@
#!/usr/bin/env perl
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

use strict;
use warnings;

use HTTP::Tiny 0.070;
use IO::Socket::SSL 1.52;
use utf8;

my $Github_Key = load_github_key();
my $Base_URL = "https://${Github_Key}api.github.com/repos/";
my $User_Repo1 = 'elastic/x-pack-elasticsearch/';
my $Issue_URL1 = "http://github.com/${User_Repo1}issues/";
my $User_Repo2 = 'elastic/machine-learning-cpp/';
my $Issue_URL2 = "http://github.com/${User_Repo2}issues/";

my @Groups = (
"breaking", "breaking-java", "deprecation", "feature",
"enhancement", "bug", "regression", "upgrade", "non-issue", "build",
"docs", "test"
);
my %Group_Labels = (
breaking => 'Breaking changes',
'breaking-java' => 'Breaking Java changes',
build => 'Build',
deprecation => 'Deprecations',
docs => 'Docs',
feature => 'New features',
enhancement => 'Enhancements',
bug => 'Bug fixes',
regression => 'Regressions',
test => 'Tests',
upgrade => 'Upgrades',
"non-issue" => 'Non-issue',
other => 'NOT CLASSIFIED',
);

use JSON();
use Encode qw(encode_utf8);

my $json = JSON->new->utf8(1);

my %All_Labels1 = fetch_labels($User_Repo1);

my $version = shift @ARGV
or dump_labels();

dump_labels(%All_Labels1, "Unknown version '$version'")
unless $All_Labels1{$version};

my $issues1 = fetch_issues($User_Repo1, $version);

# Repeat steps for second repo

my %All_Labels2 = fetch_labels($User_Repo2);

dump_labels(%All_Labels2, "Unknown version '$version'")
unless $All_Labels2{$version};

my $issues2 = fetch_issues($User_Repo2, $version);

dump_issues( $User_Repo1, $version, $issues1 );
dump_issues( $User_Repo2, $version, $issues2 );

#===================================
sub dump_issues {
#===================================
my $User_Repo = shift;
my $version = shift;
my $issues = shift;

$version =~ s/v//;
my ( $day, $month, $year ) = (gmtime)[ 3 .. 5 ];
$month++;
$year += 1900;

print <<"ASCIIDOC";

[[release-notes-$version]]
== X-Pack $version Release Notes

// Pulled from $User_Repo

ASCIIDOC

for my $group ( @Groups, 'other' ) {
my $group_issues = $issues->{$group} or next;
print "[[$group-$version]]\n"
. "[float]\n"
. "=== $Group_Labels{$group}\n\n";

for my $header ( sort keys %$group_issues ) {
my $header_issues = $group_issues->{$header};
print( $header || 'HEADER MISSING', "::\n" );

for my $issue (@$header_issues) {
my $title = $issue->{title};

if ( $issue->{state} eq 'open' ) {
$title .= " [OPEN]";
}
unless ( $issue->{pull_request} ) {
$title .= " [ISSUE]";
}
my $number = $issue->{number};

# print encode_utf8("* $title {pull}${number}[#${number}]");
print encode_utf8("* $title");
print "\n";
print encode_utf8("// https://github.com/${User_Repo}pull/${number}[#${number}]");
if ( my $related = $issue->{related_issues} ) {
my %uniq = map { $_ => 1 } @$related;
print keys %uniq > 1
? " (issues: "
: " (issue: ";
# print join ", ", map {"{issue}${_}[#${_}]"}
# print join ", ", map {"#${_}"}
print join ", ", map {"https://github.com/${User_Repo}issues/${_}[#${_}]"}
sort keys %uniq;
print ")";
}
print "\n";
}
print "\n";
}
print "\n\n";
}
}

#===================================
sub fetch_issues {
#===================================
my $User_Repo = shift;
my $version = shift;
my @issues;
my %seen;
for my $state ( 'open', 'closed' ) {
my $page = 1;
while (1) {
my $tranche
= fetch( $User_Repo
. 'issues?labels='
. $version
. '&pagesize=100&state='
. $state
. '&page='
. $page )
or die "Couldn't fetch issues for version '$version'";
push @issues, @$tranche;

for my $issue (@$tranche) {
next unless $issue->{pull_request};
for ( $issue->{body} =~ m{(?:#|${User_Repo}issues/)(\d+)}g ) {
$seen{$_}++;
push @{ $issue->{related_issues} }, $_;
}
}
$page++;
last unless @$tranche;
}
}

my %group;
ISSUE:
for my $issue (@issues) {
next if $seen{ $issue->{number} } && !$issue->{pull_request};

# uncomment for including/excluding PRs already issued in other versions
# next if grep {$_->{name}=~/^v2/} @{$issue->{labels}};
my %labels = map { $_->{name} => 1 } @{ $issue->{labels} };
my ($header) = map { substr( $_, 1 ) } grep {/^:/} sort keys %labels;
$header ||= 'NOT CLASSIFIED';
for (@Groups) {
if ( $labels{$_} ) {
push @{ $group{$_}{$header} }, $issue;
next ISSUE;
}
}
push @{ $group{other}{$header} }, $issue;
}

return \%group;
}

#===================================
sub fetch_labels {
#===================================
my $User_Repo = shift;
my %all;
my $page = 1;
while (1) {
my $labels = fetch( $User_Repo . 'labels?page=' . $page++ )
or die "Couldn't retrieve version labels";
last unless @$labels;
for (@$labels) {
my $name = $_->{name};
next unless $name =~ /^v/;
$all{$name} = 1;
}
}
return %all;
}

#===================================
sub fetch {
#===================================
my $url = $Base_URL . shift();
# print "$url\n";
my $response = HTTP::Tiny->new->get($url);
# use Data::Dumper;
# print Dumper($response);
die "$response->{status} $response->{reason}\n"
unless $response->{success};
# print $response->{content};
return $json->decode( $response->{content} );
}

#===================================
sub load_github_key {
#===================================

my ($file) = glob("~/.github_auth");
unless ( -e $file ) {
warn "File ~/.github_auth doesn't exist - using anonymous API. "
. "Generate a personal access token that has repo scope. See https://github.com/elastic/dev/blob/master/shared/development_process.md \n";
return '';
}
open my $fh, $file or die "Couldn't open $file: $!";
my ($key) = <$fh> || die "Couldn't read $file: $!";
$key =~ s/^\s+//;
$key =~ s/\s+$//;
die "Invalid GitHub key: $key"
unless $key =~ /^[0-9a-f]{40}$/;
return "$key:x-oauth-basic@";

}

#===================================
sub dump_labels {
#===================================
my %All_Labels = shift;
my $error = shift || '';
if ($error) {
$error = "\nERROR: $error\n";
}
my $labels = join( "\n - ", '', ( sort keys %All_Labels ) );
die <<USAGE
$error
USAGE: $0 version > outfile

Known versions:$labels

USAGE

}
@@ -1,2 +0,0 @@
org.gradle.daemon=false
org.gradle.jvmargs=-Xmx2048m
Binary file not shown.
@@ -1,6 +0,0 @@
distributionUrl=https\://services.gradle.org/distributions/gradle-4.5-all.zip
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
distributionSha256Sum=6ac2f8f9302f50241bf14cc5f4a3d88504ad20e61bb98c5fd048f7723b61397e
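These checked-in wrapper files (properties, jar, gradlew scripts) are normally regenerated by the wrapper task rather than edited by hand; a minimal sketch of the task declaration as it appeared in Gradle of this era (the version value here simply mirrors the distributionUrl above):

// Regenerating the wrapper files; run once and commit the output.
task wrapper(type: Wrapper) {
    gradleVersion = '4.5'
}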
@@ -1,172 +0,0 @@
#!/usr/bin/env sh

##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
echo "$*"
}

die () {
echo
echo "$*"
echo
exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`

# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi

# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
@@ -1,84 +0,0 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
@@ -5,7 +5,7 @@ dependencies {
testCompile project(path: xpackModule('security'), configuration: 'testArtifacts')
}

-String outputDir = "generated-resources/${project.name}"
+String outputDir = "${buildDir}/generated-resources/${project.name}"
task copyXPackPluginProps(type: Copy) { // wth is this?
from project(xpackModule('core')).file('src/main/plugin-metadata')
from project(xpackModule('core')).tasks.pluginProperties
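Moving outputDir under ${buildDir} keeps the generated plugin metadata out of the source tree. A copy task writing to such a directory is usually wired onto the test classpath roughly as follows (a sketch only; the task name, source project and sourceSet wiring are illustrative, not the exact configuration in this build):

// Copy generated plugin metadata into a build-local directory and expose it
// to the test runtime. Names here are placeholders.
String outputDir = "${buildDir}/generated-resources/${project.name}"

task copyPluginMetadata(type: Copy) {
    from project(':some-plugin').file('src/main/plugin-metadata')
    into outputDir
}

// make the directory part of the test output and ensure the copy runs first
sourceSets.test.output.dir(outputDir, builtBy: copyPluginMetadata)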
@@ -1,7 +0,0 @@
File extrasDir = new File(settingsDir, '..').getCanonicalFile()
if (extrasDir.name.endsWith('-extra') == false) {
  throw new GradleException("x-pack-elasticsearch must be checked out under an elasticsearch-extra directory, found ${extrasDir.name}")
}
File elasticsearchDir = new File(extrasDir.parentFile, extrasDir.name[0..-7])
project(':').projectDir = elasticsearchDir
apply from: "${elasticsearchDir}/settings.gradle"