remove old modules and gradle

Shay Banon 2011-12-06 01:01:04 +02:00
parent a8fd2d48b8
commit bf0caafa04
24 changed files with 0 additions and 1391 deletions

@@ -1,217 +0,0 @@
import java.text.SimpleDateFormat
defaultTasks "clean", "release"
apply plugin: 'base'
archivesBaseName = 'elasticsearch'
buildTime = new Date()
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
buildTimeStr = sdf.format(buildTime)
versionNumber = '0.19.0-SNAPSHOT'
explodedDistDir = new File(distsDir, 'exploded')
explodedDistLibDir = new File(explodedDistDir, 'lib')
explodedDistBinDir = new File(explodedDistDir, 'bin')
explodedDistConfigDir = new File(explodedDistDir, 'config')
mavenRepoUrl = System.getenv("REPO_URL");
if (mavenRepoUrl == null) {
// mavenRepoUrl = "file://localhost/" + projectDir.absolutePath + "/build/maven/repository"
mavenRepoUrl = "http://oss.sonatype.org/service/local/staging/deploy/maven2/"
}
mavenSnapshotRepoUrl = System.getenv("SNAPSHOT_REPO_URL");
if (mavenSnapshotRepoUrl == null) {
// mavenSnapshotRepoUrl = "file://localhost/" + projectDir.absolutePath + "/build/maven/snapshotRepository"
mavenSnapshotRepoUrl = "http://oss.sonatype.org/content/repositories/snapshots"
}
mavenRepoUser = System.getenv("REPO_USER")
mavenRepoPass = System.getenv("REPO_PASS")
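// jar built by the :jarjar subproject; the elasticsearch project compiles against it and later merges its classes into the main elasticsearch jar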
jarjarArchivePath = project(":jarjar").file("build/libs/jarjar-$versionNumber" + ".jar").absolutePath
allprojects {
group = 'org.elasticsearch'
version = versionNumber
plugins.withType(JavaPlugin).whenPluginAdded {
sourceCompatibility = 1.6
targetCompatibility = 1.6
compileJava {
sourceCompatibility = 1.6
targetCompatibility = 1.6
options.encoding = "UTF8"
}
javadoc {
maxMemory = "1g"
options.encoding = "UTF8"
}
test {
useTestNG()
String testSuiteName = project.name
suiteName = project.name
workingDir = rootProject.projectDir
options {
suiteName = testSuiteName
listeners << 'org.elasticsearch.util.testng.Listeners'
}
maxHeapSize = '1g'
systemProperties["es.logger.prefix"] = ""
systemProperties["es.test.log.conf"] = System.getProperty("es.test.log.conf", "log4j-gradle.properties")
systemProperties["es.test.log.conf"] = System.getProperty("es.test.log.conf", "log4j-gradle.properties")
}
dependencies {
if (project.name != "test-testng") {
testCompile project(':test-testng')
}
testCompile('org.testng:testng:6.1.1') { transitive = false }
testCompile('com.beust:jcommander:1.5') { transitive = false }
testCompile('org.beanshell:bsh:2.0b4') { transitive = false }
testCompile('org.hamcrest:hamcrest-core:1.3.RC2') { transitive = false }
testCompile('org.hamcrest:hamcrest-library:1.3.RC2') { transitive = false }
}
}
repositories {
mavenCentral()
mavenRepo urls: 'https://repository.jboss.org/nexus/content/groups/public'
mavenRepo urls: 'http://repository.codehaus.org/'
mavenRepo urls: 'http://elasticsearch.googlecode.com/svn/maven'
mavenRepo urls: 'http://oss.sonatype.org/content/repositories/releases'
mavenRepo urls: 'http://oss.sonatype.org/content/repositories/snapshots'
mavenRepo urls: 'http://download.java.net/maven/2/'
}
}
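// distLib collects the jars shipped under lib/ in the distribution image; jdeb carries the ant task used by the deb task below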
configurations {
dists
distLib {
visible = false
}
jdeb
}
dependencies {
jdeb group: 'org.vafer', name: 'jdeb', version: '0.8'
}
//task run(dependsOn: [configurations.distLib], description: 'Runs') << {
// ant.java(classname: "org.elasticsearch.bootstrap.Bootstrap", fork: "true", classpath: configurations.distLib.asPath,
// jvmargs: "-Des-foreground=yes")
//}
dependencies {
distLib project(':elasticsearch')
}
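// build the exploded distribution image (bin/, config/, lib/ plus the license files) that the zip, tar and deb tasks package up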
task explodedDist(dependsOn: [configurations.distLib], description: 'Builds a minimal distribution image') << {
ant.delete(dir: explodedDistDir) // clean the exploded dir
[explodedDistDir, explodedDistLibDir, explodedDistBinDir, explodedDistConfigDir]*.mkdirs()
// remove old elasticsearch files
ant.delete { fileset(dir: explodedDistLibDir, includes: "$archivesBaseName-*.jar") }
copy {
from configurations.distLib
into explodedDistLibDir
}
copy { from('bin'); into explodedDistBinDir }
copy { from('config'); into explodedDistConfigDir }
copy { from('lib'); into explodedDistLibDir }
copy {
from('.')
into explodedDistDir
include 'LICENSE.txt'
include 'NOTICE.txt'
include 'README.textile'
}
ant.replace(dir: explodedDistBinDir, token: "@ES_VERSION@", value: versionNumber)
ant.delete { fileset(dir: explodedDistLibDir, includes: "$archivesBaseName-*-javadoc.jar") }
ant.delete { fileset(dir: explodedDistLibDir, includes: "$archivesBaseName-*-sources.jar") }
ant.delete { fileset(dir: explodedDistLibDir, includes: "slf4j-*.jar") } // no need for slf4j
ant.delete { fileset(dir: explodedDistLibDir, includes: "jarjar-*.jar") } // no need jackson, we jarjar it
ant.delete { fileset(dir: explodedDistLibDir, includes: "sigar-*.jar") } // no need sigar directly under lib...
// move relevant jars to ext
// ant.move(todir: "$explodedDistLibDir/ext") {
// fileset(dir: explodedDistLibDir) {
// include(name: '*jline*')
// include(name: '*log4j*')
// include(name: '*jna*')
// }
// }
ant.chmod(dir: "$explodedDistDir/bin", perm: "ugo+rx", includes: "**/*")
}
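// the bin/elasticsearch and bin/plugin scripts are added in a second copy spec so they can be marked executable (fileMode 0755) inside the archives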
task zip(type: Zip, dependsOn: ['explodedDist']) {
rootFolder = "$archivesBaseName-${-> version}"
from(explodedDistDir) {
into rootFolder
exclude 'bin/elasticsearch'
exclude 'bin/plugin'
}
from(explodedDistDir) {
into rootFolder
include 'bin/elasticsearch'
include 'bin/plugin'
fileMode = 0755
}
}
task tar(type: Tar, dependsOn: ['explodedDist']) {
compression = Compression.GZIP
extension = "tar.gz"
rootFolder = "$archivesBaseName-${-> version}"
from(explodedDistDir) {
into rootFolder
exclude 'bin/*.bat'
exclude 'bin/elasticsearch'
exclude 'bin/plugin'
exclude 'lib/sigar/*win*'
}
from(explodedDistDir) {
into rootFolder
include 'bin/elasticsearch'
include 'bin/plugin'
fileMode = 0755
}
}
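// assemble the .deb via the jdeb ant task: the [[version]] token in the Debian control files is filtered in, then the exploded image is laid out under /usr/share/elasticsearch and /etc/elasticsearch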
task deb(dependsOn: ['explodedDist']) << {
ant.taskdef(name: "deb", classname: "org.vafer.jdeb.ant.DebAntTask", classpath: configurations.jdeb.asPath)
ant.copy(todir: "${distsDir}/debian") {
fileset(dir: "pkg/debian/control")
filterset(begintoken: "[[", endtoken: "]]") {
filter(token: "version", value: "${version}")
}
}
ant.deb(destfile: "${distsDir}/${archivesBaseName}-${version}-1_all.deb", control: "${distsDir}/debian", verbose: "true") {
tarfileset(dir: explodedDistDir, prefix: "/usr/share/elasticsearch", includes: "*.txt, *.textile", username: "root", group: "root")
tarfileset(dir: explodedDistBinDir, prefix: "/usr/share/elasticsearch/bin", excludes: "*.bat", filemode: "755", username: "root", group: "root")
tarfileset(dir: explodedDistLibDir, prefix: "/usr/share/elasticsearch/lib", includes: "*.jar, sigar/*", username: "root", group: "root")
tarfileset(dir: explodedDistConfigDir, prefix: "/etc/elasticsearch", username: "root", group: "root")
tarfileset(dir: "pkg/debian/init.d", includes: "elasticsearch", prefix: "/etc/init.d", filemode: "755", username: "root", group: "root")
tarfileset(dir: "pkg/debian/default", includes: "elasticsearch", prefix: "/etc/default", username: "root", group: "root")
}
ant.delete(dir: "${distsDir}/debian")
}
task release(dependsOn: [zip, tar, deb]) << {
ant.delete(dir: explodedDistDir)
}
task wrapper(type: Wrapper) {
gradleVersion = '1.0-milestone-3'
}

Binary file not shown.

@@ -1,6 +0,0 @@
#Mon Jun 13 14:36:56 IDT 2011
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=http\://repo.gradle.org/gradle/distributions/gradle-1.0-milestone-3-bin.zip

gradlew (vendored, 168 lines)
@@ -1,168 +0,0 @@
#!/bin/bash
##############################################################################
## ##
## Gradle wrapper script for UN*X ##
## ##
##############################################################################
# Uncomment those lines to set JVM options. GRADLE_OPTS and JAVA_OPTS can be used together.
GRADLE_OPTS="$GRADLE_OPTS -Xmx512m"
# JAVA_OPTS="$JAVA_OPTS -Xmx512m"
GRADLE_APP_NAME=Gradle
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set JAVA_HOME if it's not already set.
if [ -z "$JAVA_HOME" ] ; then
if $darwin ; then
[ -z "$JAVA_HOME" -a -d "/Library/Java/Home" ] && export JAVA_HOME="/Library/Java/Home"
[ -z "$JAVA_HOME" -a -d "/System/Library/Frameworks/JavaVM.framework/Home" ] && export JAVA_HOME="/System/Library/Frameworks/JavaVM.framework/Home"
else
javaExecutable="`which javac`"
[ -z "$javaExecutable" -o "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ] && die "JAVA_HOME not set and cannot find javac to deduce location, please set JAVA_HOME."
# readlink(1) is not available as standard on Solaris 10.
readLink=`which readlink`
[ `expr "$readLink" : '\([^ ]*\)'` = "no" ] && die "JAVA_HOME not set and readlink not available, please set JAVA_HOME."
javaExecutable="`readlink -f \"$javaExecutable\"`"
javaHome="`dirname \"$javaExecutable\"`"
javaHome=`expr "$javaHome" : '\(.*\)/bin'`
export JAVA_HOME="$javaHome"
fi
fi
# For Cygwin, ensure paths are in UNIX format before anything is touched.
if $cygwin ; then
[ -n "$JAVACMD" ] && JAVACMD=`cygpath --unix "$JAVACMD"`
[ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
fi
STARTER_MAIN_CLASS=org.gradle.wrapper.GradleWrapperMain
CLASSPATH=`dirname "$0"`/gradle/wrapper/gradle-wrapper.jar
WRAPPER_PROPERTIES=`dirname "$0"`/gradle/wrapper/gradle-wrapper.properties
# Determine the Java command to use to start the JVM.
if [ -z "$JAVACMD" ] ; then
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
else
JAVACMD="java"
fi
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
if [ -z "$JAVA_HOME" ] ; then
warn "JAVA_HOME environment variable is not set"
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query businessSystem maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add GRADLE_APP_NAME to the JAVA_OPTS as -Xdock:name
if $darwin; then
JAVA_OPTS="$JAVA_OPTS -Xdock:name=$GRADLE_APP_NAME"
# we may also want to set -Xdock:image
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
JAVA_HOME=`cygpath --path --mixed "$JAVA_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
GRADLE_APP_BASE_NAME=`basename "$0"`
exec "$JAVACMD" $JAVA_OPTS $GRADLE_OPTS \
-classpath "$CLASSPATH" \
-Dorg.gradle.appname="$GRADLE_APP_BASE_NAME" \
-Dorg.gradle.wrapper.properties="$WRAPPER_PROPERTIES" \
$STARTER_MAIN_CLASS \
"$@"

gradlew.bat (vendored, 82 lines)
@@ -1,82 +0,0 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem ##
@rem Gradle startup script for Windows ##
@rem ##
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Uncomment those lines to set JVM options. GRADLE_OPTS and JAVA_OPTS can be used together.
set GRADLE_OPTS=%GRADLE_OPTS% -Xmx512m
@rem set JAVA_OPTS=%JAVA_OPTS% -Xmx512m
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.\
@rem Find java.exe
set JAVA_EXE=java.exe
if not defined JAVA_HOME goto init
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
echo.
goto end
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set STARTER_MAIN_CLASS=org.gradle.wrapper.GradleWrapperMain
set CLASSPATH=%DIRNAME%\gradle\wrapper\gradle-wrapper.jar
set WRAPPER_PROPERTIES=%DIRNAME%\gradle\wrapper\gradle-wrapper.properties
set GRADLE_OPTS=%JAVA_OPTS% %GRADLE_OPTS% -Dorg.gradle.wrapper.properties="%WRAPPER_PROPERTIES%"
@rem Execute Gradle
"%JAVA_EXE%" %GRADLE_OPTS% -classpath "%CLASSPATH%" %STARTER_MAIN_CLASS% %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
if not "%OS%"=="Windows_NT" echo 1 > nul | choice /n /c:1
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit "%ERRORLEVEL%"
exit /b "%ERRORLEVEL%"
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

@@ -1,16 +0,0 @@
dependsOn(':elasticsearch')
apply plugin: 'java'
archivesBaseName = "$rootProject.archivesBaseName-$project.archivesBaseName"
configurations.compile.transitive = true
configurations.testCompile.transitive = true
// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'
dependencies {
compile project(':elasticsearch')
}

@@ -1,18 +0,0 @@
log4j.rootLogger=INFO, out
log4j.logger.jgroups=WARN
#log4j.logger.discovery=TRACE
#log4j.logger.cluster=TRACE
#log4j.logger.indices.cluster=DEBUG
#log4j.logger.index=TRACE
#log4j.logger.index.engine=DEBUG
#log4j.logger.index.shard=TRACE
#log4j.logger.index.cache=DEBUG
#log4j.logger.http=TRACE
#log4j.logger.monitor.jvm=DEBUG
#log4j.logger.cluster.action.shard=TRACE
#log4j.logger.index.gateway=TRACE
log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.ConversionPattern=[%d{ABSOLUTE}][%-5p][%-25c] %m%n

@@ -1,132 +0,0 @@
dependsOn(':test-testng')
apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'
archivesBaseName = "$project.archivesBaseName"
// make sure we jarjar things before we compile Java
compileJava.dependsOn << ":jarjar:jar"
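// write org/elasticsearch/version.properties (version number and build date) into the classes dir; presumably read back at runtime to report the build version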
processResources.doLast {
Properties versionProps = new Properties();
versionProps.setProperty("number", rootProject.version)
versionProps.setProperty("date", buildTimeStr)
File versionFile = new File(sourceSets.main.classesDir, "/org/elasticsearch/version.properties")
versionFile.parentFile.mkdirs()
versionFile.withOutputStream {
versionProps.store(it, '')
}
}
// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'
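// compile against the pre-built jarjar bundle plus the jars that stay as separate dependencies (jline, sigar, slf4j/log4j, jna, lucene)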
dependencies {
compile files(rootProject.jarjarArchivePath)
compile('jline:jline:0.9.94') { transitive = false }
compile files(rootProject.file("lib/sigar/sigar-1.6.4.jar").absolutePath)
compile 'org.slf4j:slf4j-api:1.5.11'
compile('org.slf4j:slf4j-log4j12:1.5.11') { transitive = false }
compile('log4j:log4j:1.2.16') { transitive = false }
compile('net.java.dev.jna:jna:3.2.7') { transitive = false }
compile('org.apache.lucene:lucene-core:3.5.0') { transitive = false }
compile('org.apache.lucene:lucene-analyzers:3.5.0') { transitive = false }
compile('org.apache.lucene:lucene-queries:3.5.0') { transitive = false }
compile('org.apache.lucene:lucene-memory:3.5.0') { transitive = false }
compile('org.apache.lucene:lucene-highlighter:3.5.0') { transitive = false }
}
configurations {
deployerJars
tools
}
dependencies {
deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}
jar {
// from sourceSets.main.allJava
manifest {
attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
}
}
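// merge the jarjar'ed dependency classes into the elasticsearch jar so a single self-contained artifact is produced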
jar << {
mergedArchivePath = new File(jar.archivePath.absolutePath + ".merged.jar")
project.ant {
jar(destfile: mergedArchivePath) {
zipFileSet(src: jar.archivePath)
zipFileSet(src: rootProject.jarjarArchivePath)
}
delete(file: jar.archivePath)
move(file: mergedArchivePath, tofile: jar.archivePath)
}
}
task sourcesJar(type: Jar, dependsOn: classes) {
classifier = 'sources'
from sourceSets.main.allSource
}
task javadocJar(type: Jar, dependsOn: javadoc) {
classifier = 'javadoc'
from javadoc.destinationDir
}
artifacts {
archives sourcesJar
archives javadocJar
}
uploadArchives {
repositories.mavenDeployer {
configuration = configurations.deployerJars
repository(url: rootProject.mavenRepoUrl) {
authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
}
snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
}
pom.project {
inceptionYear '2009'
name 'elasticsearch'
description 'Open Source, Distributed, RESTful Search Engine'
licenses {
license {
name 'The Apache Software License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
distribution 'repo'
}
}
scm {
connection 'git://github.com/elasticsearch/elasticsearch.git'
developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
url 'http://github.com/elasticsearch/elasticsearch'
}
}
pom.whenConfigured {pom ->
pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('jna') } // remove jna, its optional
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('jarjar') }
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('log4j') }
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('slf4j') }
pom.dependencies = pom.dependencies.findAll {dep -> !dep.artifactId.contains('jline') }
}
}
}
eclipseClasspath {
defaultOutputDir = file('build/eclipse-build')
}

@@ -1,91 +0,0 @@
apply plugin: 'java'
archivesBaseName = "$project.archivesBaseName"
dependencies {
runtime 'com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.2'
runtime 'com.google.guava:guava:r09'
runtime 'org.elasticsearch:es-trove:3.0.0'
runtime 'org.elasticsearch:es-jsr166y:20110918'
runtime 'org.elasticsearch:es-jsr166e:20110918'
runtime 'commons-codec:commons-codec:1.5'
// joda 2.0 moved to using volatile fields for datetime, which hurts performance for facets!
runtime 'joda-time:joda-time:1.6.2'
runtime 'org.mvel:mvel2:2.1.Beta7'
runtime 'org.codehaus.jackson:jackson-core-asl:1.9.2'
runtime 'org.codehaus.jackson:jackson-smile:1.9.2'
runtime 'org.yaml:snakeyaml:1.6'
runtime('org.jboss.netty:netty:3.2.7.Final') { transitive = false }
}
configurations {
tools
}
dependencies {
tools "org.elasticsearch:es-jarjar:1.0.0"
}
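// repackage the bundled third-party libraries (guava, jackson, joda, netty, guice, trove, ...) under org.elasticsearch.common.* so they cannot clash with other versions on the user's classpath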
jar << {
jarjarArchivePath = new File(jar.archivePath.absolutePath + ".jarjar.jar")
project.ant {
taskdef name: "jarjar", classname: "com.tonicsystems.jarjar.JarJarTask", classpath: configurations.tools.asPath
jarjar(jarfile: jarjarArchivePath) {
zipfileset(src: jar.archivePath)
configurations.runtime.files.findAll {file ->
['inject', 'codec', 'mvel', 'jackson', 'joda', 'snakeyaml', 'netty', 'guice', 'javax.inject', 'aopalliance', 'guava', 'concurrentlinkedhashmap', 'trove', 'jsr166y', 'jsr166e'].any { file.name.contains(it) }
}.each { jarjarFile ->
zipfileset(src: jarjarFile) {
exclude(name: "META-INF/**")
}
}
rule pattern: "com.googlecode.concurrentlinkedhashmap.**", result: "org.elasticsearch.common.concurrentlinkedhashmap.@1"
rule pattern: "com.google.common.**", result: "org.elasticsearch.common.@1"
rule pattern: "gnu.trove.**", result: "org.elasticsearch.common.trove.@1"
rule pattern: "jsr166y.**", result: "org.elasticsearch.common.util.concurrent.jsr166y.@1"
rule pattern: "jsr166e.**", result: "org.elasticsearch.common.util.concurrent.jsr166e.@1"
rule pattern: "org.aopalliance.**", result: "org.elasticsearch.common.aopalliance.@1"
rule pattern: "javax.inject.**", result: "org.elasticsearch.common.inject.jex.@1"
rule pattern: "com.google.inject.**", result: "org.elasticsearch.common.inject.@1"
rule pattern: "org.apache.commons.codec.**", result: "org.elasticsearch.common.codec.@1"
rule pattern: "org.mvel2.**", result: "org.elasticsearch.common.mvel2.@1"
rule pattern: "org.codehaus.jackson.**", result: "org.elasticsearch.common.jackson.@1"
rule pattern: "org.yaml.**", result: "org.elasticsearch.common.yaml.@1"
rule pattern: "org.joda.**", result: "org.elasticsearch.common.joda.@1"
rule pattern: "org.jboss.netty.**", result: "org.elasticsearch.common.netty.@1"
}
delete(file: jar.archivePath)
copy(file: jarjarArchivePath, tofile: jar.archivePath)
delete(file: jarjarArchivePath)
// seems like empty dirs still exists, unjar and clean them
unjar(src: jar.archivePath, dest: "build/tmp/extracted")
delete(file: "build/tmp/extracted/NOTICE")
delete(file: "build/tmp/extracted/LICENSE")
delete(file: "build/tmp/extracted/build.properties")
delete(dir: "build/tmp/extracted/jsr166y")
delete(dir: "build/tmp/extracted/jsr166e")
delete(dir: "build/tmp/extracted/gnu")
delete(dir: "build/tmp/extracted/org/codehaus")
delete(dir: "build/tmp/extracted/org/aopalliance")
delete(dir: "build/tmp/extracted/com/google")
delete(dir: "build/tmp/extracted/org/mvel2")
delete(dir: "build/tmp/extracted/org/joda")
delete(dir: "build/tmp/extracted/org/yaml")
delete(dir: "build/tmp/extracted/org/jboss")
delete(file: jar.archivePath)
jar(destfile: jar.archivePath, basedir: "build/tmp/extracted")
delete(dir: "build/tmp/extracted")
}
}

@@ -1,22 +0,0 @@
dependsOn(':elasticsearch')
apply plugin: 'java'
apply plugin: 'eclipse'
archivesBaseName = "$rootProject.archivesBaseName-$project.archivesBaseName"
configurations.compile.transitive = true
configurations.testCompile.transitive = true
// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'
dependencies {
compile project(':elasticsearch')
}
eclipseClasspath {
defaultOutputDir = file('build/eclipse-build')
}

@@ -1,20 +0,0 @@
apply plugin: 'java'
apply plugin: 'eclipse'
archivesBaseName = "$rootProject.archivesBaseName-$project.archivesBaseName"
configurations.compile.transitive = true
configurations.testCompile.transitive = true
// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'
dependencies {
compile('org.testng:testng:6.1.1') { transitive = false }
compile('log4j:log4j:1.2.16') { transitive = false }
}
eclipseClasspath {
defaultOutputDir = file('build/eclipse-build')
}

@@ -1,13 +0,0 @@
log4j.rootLogger=INFO, out
log4j.logger.jgroups=WARN
#log4j.logger.index=DEBUG
#log4j.logger.http=TRACE
#log4j.logger.monitor.memory=TRACE
#log4j.logger.gateway=TRACE
log4j.appender.out=org.apache.log4j.FileAppender
log4j.appender.out.file=${test.log.dir}/${test.log.name}.log
log4j.appender.out.append=true
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.ConversionPattern=[%d{ABSOLUTE}][%-5p][%-25c] %m%n

@@ -1,24 +0,0 @@
log4j.rootLogger=INFO, out
log4j.logger.jgroups=WARN
#log4j.logger.action=DEBUG
#log4j.logger.gateway=DEBUG
#log4j.logger.transport=TRACE
#log4j.logger.discovery=TRACE
#log4j.logger.cluster.service=TRACE
#log4j.logger.cluster.action.shard=DEBUG
#log4j.logger.indices.cluster=DEBUG
#log4j.logger.index=TRACE
#log4j.logger.index.engine=DEBUG
#log4j.logger.index.shard.service=DEBUG
#log4j.logger.index.shard.recovery=DEBUG
#log4j.logger.index.cache=DEBUG
#log4j.logger.http=TRACE
#log4j.logger.monitor.memory=TRACE
#log4j.logger.cluster.action.shard=TRACE
#log4j.logger.index.gateway=TRACE
log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.ConversionPattern=[%d{ABSOLUTE}][%-5p][%-25c] %m%n

@@ -1,51 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.testng;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
/**
* @author kimchy (Shay Banon)
*/
public class DotTestListener extends TestListenerAdapter {
private int count = 1;
@Override public void onTestFailure(ITestResult tr) {
log("F");
}
@Override public void onTestSkipped(ITestResult tr) {
log("S");
}
@Override public void onTestSuccess(ITestResult tr) {
log(".");
}
private void log(String string) {
System.err.print(string);
if (count++ % 40 == 0) {
System.err.println("");
}
}
}

@@ -1,74 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.testng;
import org.testng.ITestContext;
import org.testng.ITestListener;
import org.testng.ITestResult;
/**
* @author kimchy (Shay Banon)
*/
public class Listeners implements ITestListener {
private final ITestListener[] listeners = new ITestListener[]{new LoggingListener()};
@Override public void onTestStart(ITestResult result) {
for (ITestListener listener : listeners) {
listener.onTestStart(result);
}
}
@Override public void onTestSuccess(ITestResult result) {
for (ITestListener listener : listeners) {
listener.onTestSuccess(result);
}
}
@Override public void onTestFailure(ITestResult result) {
for (ITestListener listener : listeners) {
listener.onTestFailure(result);
}
}
@Override public void onTestSkipped(ITestResult result) {
for (ITestListener listener : listeners) {
listener.onTestSkipped(result);
}
}
@Override public void onTestFailedButWithinSuccessPercentage(ITestResult result) {
for (ITestListener listener : listeners) {
listener.onTestFailedButWithinSuccessPercentage(result);
}
}
@Override public void onStart(ITestContext context) {
for (ITestListener listener : listeners) {
listener.onStart(context);
}
}
@Override public void onFinish(ITestContext context) {
for (ITestListener listener : listeners) {
listener.onFinish(context);
}
}
}

@@ -1,102 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.util.testng;
import org.apache.log4j.PropertyConfigurator;
import org.testng.ITestContext;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import java.io.File;
import java.io.IOException;
import java.util.Properties;
/**
* @author kimchy (shay.banon)
*/
public class LoggingListener extends TestListenerAdapter {
@Override public void onStart(ITestContext context) {
String logsDir = context.getOutputDirectory() + "/logs";
deleteRecursively(new File(logsDir), false);
System.setProperty("test.log.dir", logsDir);
System.setProperty("es.path.data", context.getOutputDirectory() + "/data");
System.setProperty("es.path.work", context.getOutputDirectory() + "/work");
}
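// per test: point log4j at a test-specific log file name and reload the configuration so each test class logs to its own file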
@Override public void onTestStart(ITestResult result) {
String logName = result.getTestClass().getName();
if (logName.startsWith("org.elasticsearch.")) {
logName = logName.substring("org.elasticsearch.".length());
}
System.setProperty("test.log.name", logName);
Properties props = new Properties();
try {
props.load(LoggingListener.class.getClassLoader().getResourceAsStream(System.getProperty("es.test.log.conf", "log4j.properties")));
} catch (IOException e) {
e.printStackTrace();
}
PropertyConfigurator.configure(props);
org.apache.log4j.Logger.getLogger("testng").info("========== Starting Test [" + result.getName() + "] ==========");
}
@Override public void onTestSuccess(ITestResult result) {
org.apache.log4j.Logger.getLogger("testng").info("========== Test Success [" + result.getName() + "] ==========");
}
@Override public void onTestFailure(ITestResult result) {
org.apache.log4j.Logger.getLogger("testng").info("========== Test Failure [" + result.getName() + "] ==========");
}
@Override public void onTestSkipped(ITestResult result) {
org.apache.log4j.Logger.getLogger("testng").info("========== Test Skipped [" + result.getName() + "] ==========");
}
/**
* Delete the supplied {@link java.io.File} - for directories,
* recursively delete any nested directories or files as well.
*
* @param root the root <code>File</code> to delete
* @param deleteRoot whether or not to delete the root itself or just the content of the root.
* @return <code>true</code> if the <code>File</code> was deleted,
* otherwise <code>false</code>
*/
public static boolean deleteRecursively(File root, boolean deleteRoot) {
if (root != null && root.exists()) {
if (root.isDirectory()) {
File[] children = root.listFiles();
if (children != null) {
for (File aChildren : children) {
deleteRecursively(aChildren, true);
}
}
}
if (deleteRoot) {
return root.delete();
} else {
return true;
}
}
return false;
}
}

@@ -1,4 +0,0 @@
/etc/init.d/elasticsearch
/etc/default/elasticsearch
/etc/elasticsearch/logging.yml
/etc/elasticsearch/elasticsearch.yml

@@ -1,36 +0,0 @@
Package: elasticsearch
Version: [[version]]
Architecture: all
Maintainer: Nicolas Huray <nicolas.huray@gmail.com>
Depends: openjdk-6-jre-headless | sun-java6-jre
Section: web
Priority: optional
Homepage: http://www.elasticsearch.org/
Description: Open Source, Distributed, RESTful Search Engine
ElasticSearch is a distributed RESTful search engine built for the cloud.
.
Features include:
.
* Distributed and Highly Available Search Engine.
- Each index is fully sharded with a configurable number of shards.
- Each shard can have one or more replicas.
- Read / Search operations performed on any of the replica shards.
* Multi Tenant with Multi Types.
- Support for more than one index.
- Support for more than one type per index.
- Index level configuration (number of shards, index storage, ...).
* Various set of APIs
- HTTP RESTful API
- Native Java API.
- All APIs perform automatic node operation rerouting.
* Document oriented
- No need for upfront schema definition.
- Schema can be defined per type for customization of the indexing process.
* Reliable, Asynchronous Write Behind for long term persistency.
* (Near) Real Time Search.
* Built on top of Lucene
- Each shard is a fully functional Lucene index
- All the power of Lucene easily exposed through simple configuration / plugins.
* Per operation consistency
- Single document level operations are atomic, consistent, isolated and durable.
* Open Source under Apache 2 License.

@@ -1,39 +0,0 @@
#!/bin/sh
set -e
case "$1" in
configure)
[ -f /etc/default/elasticsearch ] && . /etc/default/elasticsearch
[ -z "$ES_USER" ] && ES_USER="elasticsearch"
[ -z "$ES_GROUP" ] && ES_GROUP="elasticsearch"
if ! getent group "$ES_GROUP" > /dev/null 2>&1 ; then
addgroup --system "$ES_GROUP" --quiet
fi
if ! id $ES_USER > /dev/null 2>&1 ; then
adduser --system --home /usr/share/elasticsearch --no-create-home \
--ingroup "$ES_GROUP" --disabled-password --shell /bin/false \
"$ES_USER"
fi
# Set user permissions on /var/log/elasticsearch and /var/lib/elasticsearch
mkdir -p /var/log/elasticsearch /var/lib/elasticsearch
chown -R $ES_USER:$ES_GROUP /var/log/elasticsearch /var/lib/elasticsearch
chmod 755 /var/log/elasticsearch /var/lib/elasticsearch
# configuration files should not be modifiable by elasticsearch user, as this can be a security issue
chown -Rh root:root /etc/elasticsearch/*
chmod 755 /etc/elasticsearch
chmod 644 /etc/elasticsearch/*
;;
esac
if [ -x "/etc/init.d/elasticsearch" ]; then
update-rc.d elasticsearch defaults 95 10 >/dev/null
if [ -x "`which invoke-rc.d 2>/dev/null`" ]; then
invoke-rc.d elasticsearch start || true
else
/etc/init.d/elasticsearch start || true
fi
fi

@@ -1,30 +0,0 @@
#!/bin/sh
set -e
case "$1" in
remove)
# Remove logs and data
rm -rf /var/log/elasticsearch /var/lib/elasticsearch
;;
purge)
# Remove service
update-rc.d elasticsearch remove >/dev/null || true
# Remove logs and data
rm -rf /var/log/elasticsearch /var/lib/elasticsearch
# Remove user/group
deluser elasticsearch || true
delgroup elasticsearch || true
;;
upgrade|failed-upgrade|abort-install|abort-upgrade|disappear)
# Nothing to do here
;;
*)
echo "$0 called with unknown argument \`$1'" >&2
exit 1
;;
esac

@@ -1,10 +0,0 @@
#!/bin/sh
set -e
if [ -x "/etc/init.d/elasticsearch" ]; then
if [ -x "`which invoke-rc.d 2>/dev/null`" ]; then
invoke-rc.d elasticsearch stop || true
else
/etc/init.d/elasticsearch stop || true
fi
fi

@@ -1,24 +0,0 @@
# Run ElasticSearch as this user ID and group ID
ES_USER=elasticsearch
ES_GROUP=elasticsearch
# Minimum Heap memory to run ElasticSearch
ES_MIN_MEM=256m
# Maximum Heap memory to run ElasticSearch
ES_MAX_MEM=1g
# ElasticSearch log directory
LOG_DIR=/var/log/elasticsearch
# ElasticSearch data directory
DATA_DIR=/var/lib/elasticsearch
# ElasticSearch work directory
WORK_DIR=/tmp/elasticsearch
# ElasticSearch configuration directory
CONF_DIR=/etc/elasticsearch
# ElasticSearch configuration file (elasticsearch.yml)
CONF_FILE=/etc/elasticsearch/elasticsearch.yml

@@ -1,192 +0,0 @@
#!/bin/sh
#
# /etc/init.d/elasticsearch -- startup script for Elasticsearch
#
# Written by Miquel van Smoorenburg <miquels@cistron.nl>.
# Modified for Debian GNU/Linux by Ian Murdock <imurdock@gnu.ai.mit.edu>.
# Modified for Tomcat by Stefan Gybas <sgybas@debian.org>.
# Modified for Tomcat6 by Thierry Carrez <thierry.carrez@ubuntu.com>.
# Additional improvements by Jason Brittain <jason.brittain@mulesoft.com>.
# Modified by Nicolas Huray for ElasticSearch <nicolas.huray@gmail.com>.
#
### BEGIN INIT INFO
# Provides: elasticsearch
# Required-Start: $all
# Required-Stop: $all
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Starts elasticsearch
# Description: Starts elasticsearch using start-stop-daemon
### END INIT INFO
set -e
PATH=/bin:/usr/bin:/sbin:/usr/sbin
NAME=elasticsearch
DESC="ElasticSearch Server"
DEFAULT=/etc/default/$NAME
if [ `id -u` -ne 0 ]; then
echo "You need root privileges to run this script"
exit 1
fi
. /lib/lsb/init-functions
if [ -r /etc/default/rcS ]; then
. /etc/default/rcS
fi
# The following variables can be overwritten in $DEFAULT
# Run ElasticSearch as this user ID and group ID
ES_USER=elasticsearch
ES_GROUP=elasticsearch
# The first existing directory is used for JAVA_HOME (if JAVA_HOME is not defined in $DEFAULT)
JDK_DIRS="/usr/lib/jvm/java-7-openjdk /usr/lib/jvm/java-7-oracle /usr/lib/jvm/java-6-openjdk /usr/lib/jvm/java-6-sun"
# Look for the right JVM to use
for jdir in $JDK_DIRS; do
if [ -r "$jdir/bin/java" -a -z "${JAVA_HOME}" ]; then
JAVA_HOME="$jdir"
fi
done
export JAVA_HOME
# Directory where the ElasticSearch binary distribution resides
ES_HOME=/usr/share/$NAME
# Minimum Heap memory to run ElasticSearch
ES_MIN_MEM=256m
# Maximum Heap memory to run ElasticSearch
ES_MAX_MEM=1g
# ElasticSearch log directory
LOG_DIR=/var/log/$NAME
# ElasticSearch data directory
DATA_DIR=/var/lib/$NAME
# ElasticSearch work directory
WORK_DIR=/tmp/$NAME
# ElasticSearch configuration directory
CONF_DIR=/etc/$NAME
# ElasticSearch configuration file (elasticsearch.yml)
CONF_FILE=$CONF_DIR/elasticsearch.yml
# End of variables that can be overwritten in $DEFAULT
# overwrite settings from default file
if [ -f "$DEFAULT" ]; then
. "$DEFAULT"
fi
# Define other required variables
PID_FILE=/var/run/$NAME.pid
DAEMON=$ES_HOME/bin/elasticsearch
DAEMON_OPTS="-p $PID_FILE -Des.config=$CONF_FILE -Des.path.home=$ES_HOME -Des.path.logs=$LOG_DIR -Des.path.data=$DATA_DIR -Des.path.work=$WORK_DIR -Des.path.conf=$CONF_DIR"
export ES_MIN_MEM ES_MAX_MEM
# Check DAEMON exists
test -x $DAEMON || exit 0
case "$1" in
start)
if [ -z "$JAVA_HOME" ]; then
log_failure_msg "no JDK found - please set JAVA_HOME"
exit 1
fi
log_daemon_msg "Starting $DESC"
if start-stop-daemon --test --start --pidfile "$PID_FILE" \
--user "$ES_USER" --exec "$JAVA_HOME/bin/java" \
>/dev/null; then
# Prepare environment
mkdir -p "$LOG_DIR" "$DATA_DIR" "$WORK_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" "$DATA_DIR" "$WORK_DIR"
touch "$PID_FILE" && chown "$ES_USER":"$ES_GROUP" "$PID_FILE"
ulimit -n 65535
# Start Daemon
start-stop-daemon --start -b --user "$ES_USER" -c "$ES_USER" --pidfile "$PID_FILE" --exec /bin/bash -- -c "$DAEMON $DAEMON_OPTS"
sleep 1
if start-stop-daemon --test --start --pidfile "$PID_FILE" \
--user "$ES_USER" --exec "$JAVA_HOME/bin/java" \
>/dev/null; then
if [ -f "$PID_FILE" ]; then
rm -f "$PID_FILE"
fi
log_end_msg 1
else
log_end_msg 0
fi
else
log_progress_msg "(already running)"
log_end_msg 0
fi
;;
stop)
log_daemon_msg "Stopping $DESC"
set +e
if [ -f "$PID_FILE" ]; then
start-stop-daemon --stop --pidfile "$PID_FILE" \
--user "$ES_USER" \
--retry=TERM/20/KILL/5 >/dev/null
# capture the exit code once; otherwise the elif below would test the status of the preceding [ ... ] instead of start-stop-daemon
RC=$?
if [ $RC -eq 1 ]; then
log_progress_msg "$DESC is not running but pid file exists, cleaning up"
elif [ $RC -eq 3 ]; then
PID="`cat $PID_FILE`"
log_failure_msg "Failed to stop $DESC (pid $PID)"
exit 1
fi
rm -f "$PID_FILE"
else
log_progress_msg "(not running)"
fi
log_end_msg 0
set -e
;;
status)
set +e
start-stop-daemon --test --start --pidfile "$PID_FILE" \
--user "$ES_USER" --exec "$JAVA_HOME/bin/java" \
>/dev/null 2>&1
if [ "$?" = "0" ]; then
if [ -f "$PID_FILE" ]; then
log_success_msg "$DESC is not running, but pid file exists."
exit 1
else
log_success_msg "$DESC is not running."
exit 3
fi
else
log_success_msg "$DESC is running with pid `cat $PID_FILE`"
fi
set -e
;;
restart|force-reload)
if [ -f "$PID_FILE" ]; then
$0 stop
sleep 1
fi
$0 start
;;
*)
log_success_msg "Usage: $0 {start|stop|restart|force-reload|status}"
exit 1
;;
esac
exit 0

@@ -1,20 +0,0 @@
include 'test-testng'
include 'jarjar'
include 'elasticsearch'
include 'test-integration'
include 'benchmark-micro'
rootProject.name = 'elasticsearch-root'
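// map project names to directories: camel-case humps become dashes, dashes become path separators, and non-plugin projects live under modules/ (e.g. 'test-testng' -> modules/test/testng)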
rootProject.children.each {project ->
String fileBaseName = project.name.replaceAll("\\p{Upper}") { "-${it.toLowerCase()}" }
fileBaseName = fileBaseName.replace('-', '/');
if (fileBaseName.startsWith("plugins")) {
project.projectDir = new File(settingsDir, "$fileBaseName")
} else {
project.projectDir = new File(settingsDir, "modules/$fileBaseName")
}
}