Migrate built-in plugins to their own repos, closes #1520.

parent 9995d27841
commit 473b0f4dff
@@ -5,20 +5,6 @@
       <module fileurl="file://$PROJECT_DIR$/.idea/modules//benchmark-micro.iml" filepath="$PROJECT_DIR$/.idea/modules//benchmark-micro.iml" />
       <module fileurl="file://$PROJECT_DIR$/.idea/modules/elasticsearch.iml" filepath="$PROJECT_DIR$/.idea/modules/elasticsearch.iml" />
       <module fileurl="file://$PROJECT_DIR$/.idea/modules/elasticsearch-root.iml" filepath="$PROJECT_DIR$/.idea/modules/elasticsearch-root.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules/plugin-analysis-icu.iml" filepath="$PROJECT_DIR$/.idea/modules/plugin-analysis-icu.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-cloud-aws.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-cloud-aws.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules/plugin-lang-groovy.iml" filepath="$PROJECT_DIR$/.idea/modules/plugin-lang-groovy.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-lang-javascript.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-lang-javascript.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-lang-python.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-lang-python.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-mapper-attachments.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-mapper-attachments.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-river-couchdb.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-river-couchdb.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-river-rabbitmq.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-river-rabbitmq.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules/plugin-river-twitter.iml" filepath="$PROJECT_DIR$/.idea/modules/plugin-river-twitter.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-river-wikipedia.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-river-wikipedia.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-transport-memcached.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-transport-memcached.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-transport-thrift.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-transport-thrift.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugin-transport-wares.iml" filepath="$PROJECT_DIR$/.idea/modules//plugin-transport-wares.iml" />
-      <module fileurl="file://$PROJECT_DIR$/.idea/modules//plugins-hadoop.iml" filepath="$PROJECT_DIR$/.idea/modules//plugins-hadoop.iml" />
       <module fileurl="file://$PROJECT_DIR$/.idea/modules//test-integration.iml" filepath="$PROJECT_DIR$/.idea/modules//test-integration.iml" />
       <module fileurl="file://$PROJECT_DIR$/.idea/modules//test-testng.iml" filepath="$PROJECT_DIR$/.idea/modules//test-testng.iml" />
     </modules>
@@ -16,19 +16,6 @@
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="module" module-name="elasticsearch" />
-    <orderEntry type="module" module-name="plugin-lang-groovy" />
-    <orderEntry type="module" module-name="plugin-lang-javascript" />
-    <orderEntry type="module" module-name="plugin-lang-python" />
-    <orderEntry type="module" module-name="plugin-mapper-attachments" />
-    <orderEntry type="module" module-name="plugin-transport-memcached" />
-    <orderEntry type="module" module-name="plugin-transport-thrift" />
-    <orderEntry type="module" module-name="plugin-analysis-icu" />
-    <orderEntry type="module" module-name="plugins-hadoop" />
-    <orderEntry type="module" module-name="plugin-cloud-aws" />
-    <orderEntry type="module" module-name="plugin-river-twitter" />
-    <orderEntry type="module" module-name="plugin-river-couchdb" />
-    <orderEntry type="module" module-name="plugin-river-rabbitmq" />
-    <orderEntry type="module" module-name="plugin-river-wikipedia" />
     <orderEntry type="module" module-name="test-integration" />
   </component>
 </module>
@@ -1,134 +0,0 @@
-dependsOn(':elasticsearch')
-
-apply plugin: 'java'
-apply plugin: 'maven'
-apply plugin: 'eclipse'
-
-archivesBaseName = "elasticsearch-analysis-icu"
-
-explodedDistDir = new File(distsDir, 'exploded')
-
-manifest.mainAttributes("Implementation-Title": "ElasticSearch::Plugins::Analysis::ICU", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
-
-configurations.compile.transitive = true
-configurations.testCompile.transitive = true
-
-// no need to use the resource dir
-sourceSets.main.resources.srcDirs 'src/main/java'
-sourceSets.test.resources.srcDirs 'src/test/java'
-
-// add the source files to the dist jar
-//jar {
-//    from sourceSets.main.allJava
-//}
-
-configurations {
-    dists
-    distLib {
-        visible = false
-        transitive = false
-    }
-}
-
-dependencies {
-    compile project(':elasticsearch')
-
-    compile('org.apache.lucene:lucene-icu4j:3.2.0') { transitive = false }
-    distLib('org.apache.lucene:lucene-icu4j:3.2.0') { transitive = false }
-    compile('org.apache.lucene:lucene-icu:3.5.0') { transitive = false }
-    distLib('org.apache.lucene:lucene-icu:3.5.0') { transitive = false }
-}
-
-task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
-    [explodedDistDir]*.mkdirs()
-
-    copy {
-        from configurations.distLib
-        into explodedDistDir
-    }
-
-    // remove elasticsearch files (compile above adds the elasticsearch one)
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }
-
-    copy {
-        from libsDir
-        into explodedDistDir
-    }
-
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
-}
-
-task zip(type: Zip, dependsOn: ['explodedDist']) {
-    from(explodedDistDir) {
-    }
-}
-
-task release(dependsOn: [zip]) << {
-    ant.delete(dir: explodedDistDir)
-    copy {
-        from distsDir
-        into(new File(rootProject.distsDir, "plugins"))
-    }
-}
-
-configurations {
-    deployerJars
-}
-
-dependencies {
-    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
-}
-
-task sourcesJar(type: Jar, dependsOn: classes) {
-    classifier = 'sources'
-    from sourceSets.main.allSource
-}
-
-task javadocJar(type: Jar, dependsOn: javadoc) {
-    classifier = 'javadoc'
-    from javadoc.destinationDir
-}
-
-artifacts {
-    archives sourcesJar
-    archives javadocJar
-}
-
-uploadArchives {
-    repositories.mavenDeployer {
-        configuration = configurations.deployerJars
-        repository(url: rootProject.mavenRepoUrl) {
-            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
-        }
-        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
-            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
-        }

-        pom.project {
-            inceptionYear '2009'
-            name 'elasticsearch-plugins-analysis-icu'
-            description 'ICU Analysis Plugin for ElasticSearch'
-            licenses {
-                license {
-                    name 'The Apache Software License, Version 2.0'
-                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
-                    distribution 'repo'
-                }
-            }
-            scm {
-                connection 'git://github.com/elasticsearch/elasticsearch.git'
-                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
-                url 'http://github.com/elasticsearch/elasticsearch'
-            }
-        }
-
-        pom.whenConfigured {pom ->
-            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
-        }
-    }
-}
-
-eclipseClasspath {
-    defaultOutputDir = file('build/eclipse-build')
-}
@@ -1 +0,0 @@
-plugin=org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin
@@ -1,37 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class IcuAnalysisBinderProcessor extends AnalysisModule.AnalysisBinderProcessor {
-
-    @Override public void processTokenFilters(TokenFiltersBindings tokenFiltersBindings) {
-        tokenFiltersBindings.processTokenFilter("icuNormalizer", IcuNormalizerTokenFilterFactory.class);
-        tokenFiltersBindings.processTokenFilter("icu_normalizer", IcuNormalizerTokenFilterFactory.class);
-
-        tokenFiltersBindings.processTokenFilter("icuFolding", IcuFoldingTokenFilterFactory.class);
-        tokenFiltersBindings.processTokenFilter("icu_folding", IcuFoldingTokenFilterFactory.class);
-
-        tokenFiltersBindings.processTokenFilter("icuCollation", IcuCollationTokenFilterFactory.class);
-        tokenFiltersBindings.processTokenFilter("icu_collation", IcuCollationTokenFilterFactory.class);
-    }
-}
@@ -1,104 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import com.ibm.icu.text.Collator;
-import com.ibm.icu.text.RuleBasedCollator;
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.collation.ICUCollationKeyFilter;
-import org.elasticsearch.ElasticSearchIllegalArgumentException;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.assistedinject.Assisted;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.FailedToResolveConfigException;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.settings.IndexSettings;
-
-import java.io.IOException;
-import java.util.Locale;
-
-/**
- * An ICU-based collation token filter. There are two ways to configure collation:
- *
- * <p>The first is simply specifying the locale (defaults to the default locale). The <tt>language</tt>
- * parameter is the lowercase two-letter ISO-639 code. An additional <tt>country</tt> and <tt>variant</tt>
- * can be provided.
- *
- * <p>The second option is to specify collation rules as defined in the <a href="http://www.icu-project.org/userguide/Collate_Customization.html">
- * Collation customization</a> chapter in icu docs. The <tt>rules</tt> parameter can either embed the rules definition
- * in the settings or refer to an external location (preferably located under the <tt>config</tt> location, and resolved relative to it).
- *
- * @author kimchy (shay.banon)
- */
-public class IcuCollationTokenFilterFactory extends AbstractTokenFilterFactory {
-
-    private final Collator collator;
-
-    @Inject public IcuCollationTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, Environment environment, @Assisted String name, @Assisted Settings settings) {
-        super(index, indexSettings, name, settings);
-
-        Collator collator;
-        String rules = settings.get("rules");
-        if (rules != null) {
-            FailedToResolveConfigException failureToResolve = null;
-            try {
-                rules = environment.resolveConfigAndLoadToString(rules);
-            } catch (FailedToResolveConfigException e) {
-                failureToResolve = e;
-            } catch (IOException e) {
-                throw new ElasticSearchIllegalArgumentException("Failed to load collation rules", e);
-            }
-            try {
-                collator = new RuleBasedCollator(rules);
-            } catch (Exception e) {
-                if (failureToResolve != null) {
-                    throw new ElasticSearchIllegalArgumentException("Failed to resolve collation rules location", failureToResolve);
-                } else {
-                    throw new ElasticSearchIllegalArgumentException("Failed to parse collation rules", e);
-                }
-            }
-        } else {
-            String language = settings.get("language");
-            if (language != null) {
-                Locale locale;
-                String country = settings.get("country");
-                if (country != null) {
-                    String variant = settings.get("variant");
-                    if (variant != null) {
-                        locale = new Locale(language, country, variant);
-                    } else {
-                        locale = new Locale(language, country);
-                    }
-                } else {
-                    locale = new Locale(language);
-                }
-                collator = Collator.getInstance(locale);
-            } else {
-                collator = Collator.getInstance();
-            }
-        }
-        this.collator = collator;
-    }
-
-    @Override public TokenStream create(TokenStream tokenStream) {
-        return new ICUCollationKeyFilter(tokenStream, collator);
-    }
-}
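The javadoc above names the two configuration styles the factory understands: locale parameters (language/country/variant) or tailored rules. A minimal sketch of both, assuming the 0.x-era ImmutableSettings builder and the icu_collation name registered by IcuAnalysisBinderProcessor; the filter name my_collator and the rule string are hypothetical:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class IcuCollationSettingsSketch {
    public static void main(String[] args) {
        // Option 1: locale-based collation; "language" is the lowercase ISO-639 code.
        Settings byLocale = ImmutableSettings.settingsBuilder()
                .put("index.analysis.filter.my_collator.type", "icu_collation")
                .put("index.analysis.filter.my_collator.language", "de")
                .put("index.analysis.filter.my_collator.country", "DE") // optional
                .build();

        // Option 2: tailored rules, embedded inline here; the same key may instead
        // name a file that the factory resolves against the config directory.
        Settings byRules = ImmutableSettings.settingsBuilder()
                .put("index.analysis.filter.my_collator.type", "icu_collation")
                .put("index.analysis.filter.my_collator.rules", "& a < b < c")
                .build();
    }
}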
@@ -1,43 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.analysis.icu.ICUFoldingFilter;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.assistedinject.Assisted;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.settings.IndexSettings;
-
-
-/**
- * @author kimchy (shay.banon)
- */
-public class IcuFoldingTokenFilterFactory extends AbstractTokenFilterFactory {
-
-    @Inject public IcuFoldingTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
-        super(index, indexSettings, name, settings);
-    }
-
-    @Override public TokenStream create(TokenStream tokenStream) {
-        return new ICUFoldingFilter(tokenStream);
-    }
-}
@@ -1,50 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import com.ibm.icu.text.Normalizer2;
-import org.apache.lucene.analysis.TokenStream;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.assistedinject.Assisted;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.settings.IndexSettings;
-
-
-/**
- * Uses the {@link org.apache.lucene.analysis.icu.ICUNormalizer2Filter} to normalize tokens.
- *
- * <p>The <tt>name</tt> can be used to provide the type of normalization to perform.
- *
- * @author kimchy (shay.banon)
- */
-public class IcuNormalizerTokenFilterFactory extends AbstractTokenFilterFactory {
-
-    private final String name;
-
-    @Inject public IcuNormalizerTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
-        super(index, indexSettings, name, settings);
-        this.name = settings.get("name", "nfkc_cf");
-    }
-
-    @Override public TokenStream create(TokenStream tokenStream) {
-        return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, Normalizer2.getInstance(null, name, Normalizer2.Mode.COMPOSE));
-    }
-}
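For reference, the factory above reduces to one ICU4J lookup wrapped by Lucene's ICUNormalizer2Filter. A minimal sketch of the ICU4J side alone, assuming icu4j on the classpath; the sample strings are illustrative:

import com.ibm.icu.text.Normalizer2;

public class IcuNormalizeSketch {
    public static void main(String[] args) {
        // The same lookup the factory performs for its default "nfkc_cf" form:
        // NFKC compatibility mapping plus case folding, with composed output.
        Normalizer2 normalizer = Normalizer2.getInstance(null, "nfkc_cf", Normalizer2.Mode.COMPOSE);

        System.out.println(normalizer.normalize("Ⅻ"));       // "xii" (Roman numeral compatibility character)
        System.out.println(normalizer.normalize("Fußball")); // "fussball" (case folding expands ß)
    }
}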
@@ -1,46 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.plugin.analysis.icu;
-
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.index.analysis.AnalysisModule;
-import org.elasticsearch.index.analysis.IcuAnalysisBinderProcessor;
-import org.elasticsearch.plugins.AbstractPlugin;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AnalysisICUPlugin extends AbstractPlugin {
-
-    @Override public String name() {
-        return "analysis-icu";
-    }
-
-    @Override public String description() {
-        return "UTF related ICU analysis support";
-    }
-
-    @Override public void processModule(Module module) {
-        if (module instanceof AnalysisModule) {
-            AnalysisModule analysisModule = (AnalysisModule) module;
-            analysisModule.addProcessor(new IcuAnalysisBinderProcessor());
-        }
-    }
-}
@@ -1,58 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.analysis;
-
-import org.elasticsearch.common.inject.Injector;
-import org.elasticsearch.common.inject.ModulesBuilder;
-import org.elasticsearch.common.settings.SettingsModule;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.EnvironmentModule;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.IndexNameModule;
-import org.elasticsearch.index.settings.IndexSettingsModule;
-import org.elasticsearch.indices.analysis.IndicesAnalysisModule;
-import org.elasticsearch.indices.analysis.IndicesAnalysisService;
-import org.hamcrest.MatcherAssert;
-import org.testng.annotations.Test;
-
-import static org.elasticsearch.common.settings.ImmutableSettings.Builder.*;
-import static org.hamcrest.Matchers.*;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class SimpleIcuAnalysisTests {
-
-    @Test public void testDefaultsIcuAnalysis() {
-        Index index = new Index("test");
-
-        Injector parentInjector = new ModulesBuilder().add(new SettingsModule(EMPTY_SETTINGS), new EnvironmentModule(new Environment(EMPTY_SETTINGS)), new IndicesAnalysisModule()).createInjector();
-        Injector injector = new ModulesBuilder().add(
-                new IndexSettingsModule(index, EMPTY_SETTINGS),
-                new IndexNameModule(index),
-                new AnalysisModule(EMPTY_SETTINGS, parentInjector.getInstance(IndicesAnalysisService.class)).addProcessor(new IcuAnalysisBinderProcessor()))
-                .createChildInjector(parentInjector);
-
-        AnalysisService analysisService = injector.getInstance(AnalysisService.class);
-
-        TokenFilterFactory filterFactory = analysisService.tokenFilter("icu_normalizer");
-        MatcherAssert.assertThat(filterFactory, instanceOf(IcuNormalizerTokenFilterFactory.class));
-    }
-}
@@ -1,140 +0,0 @@
-dependsOn(':elasticsearch')
-
-apply plugin: 'java'
-apply plugin: 'maven'
-apply plugin: 'eclipse'
-
-archivesBaseName = "elasticsearch-cloud-aws"
-
-explodedDistDir = new File(distsDir, 'exploded')
-
-configurations.compile.transitive = true
-configurations.testCompile.transitive = true
-
-// no need to use the resource dir
-sourceSets.main.resources.srcDirs 'src/main/java'
-sourceSets.test.resources.srcDirs 'src/test/java'
-
-jar {
-    // from sourceSets.main.allJava
-    manifest {
-        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
-    }
-}
-
-configurations {
-    dists
-    distLib {
-        visible = false
-        transitive = false
-    }
-}
-
-dependencies {
-    compile project(':elasticsearch')
-    compile("com.amazonaws:aws-java-sdk:1.2.12") { transitive = false }
-    runtime("commons-logging:commons-logging:1.1.1") { transitive = false }
-    runtime("commons-codec:commons-codec:1.3") { transitive = false }
-    runtime("org.apache.httpcomponents:httpclient:4.1.1") { transitive = false }
-    runtime("org.apache.httpcomponents:httpcore:4.1") { transitive = false }
-
-    distLib("com.amazonaws:aws-java-sdk:1.2.12") { transitive = false }
-    distLib("commons-codec:commons-codec:1.3") { transitive = false }
-    distLib("commons-logging:commons-logging:1.1.1") { transitive = false }
-    distLib("org.apache.httpcomponents:httpclient:4.1.1") { transitive = false }
-    distLib("org.apache.httpcomponents:httpcore:4.1") { transitive = false }
-}
-
-task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
-    [explodedDistDir]*.mkdirs()
-
-    copy {
-        from configurations.distLib
-        into explodedDistDir
-    }
-
-    // remove elasticsearch files (compile above adds the elasticsearch one)
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }
-
-    copy {
-        from libsDir
-        into explodedDistDir
-    }
-
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
-    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
-}
-
-task zip(type: Zip, dependsOn: ['explodedDist']) {
-    from(explodedDistDir) {
-    }
-}
-
-task release(dependsOn: [zip]) << {
-    ant.delete(dir: explodedDistDir)
-    copy {
-        from distsDir
-        into(new File(rootProject.distsDir, "plugins"))
-    }
-}
-
-configurations {
-    deployerJars
-}
-
-dependencies {
-    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
-}
-
-task sourcesJar(type: Jar, dependsOn: classes) {
-    classifier = 'sources'
-    from sourceSets.main.allSource
-}
-
-task javadocJar(type: Jar, dependsOn: javadoc) {
-    classifier = 'javadoc'
-    from javadoc.destinationDir
-}
-
-artifacts {
-    archives sourcesJar
-    archives javadocJar
-}
-
-uploadArchives {
-    repositories.mavenDeployer {
-        configuration = configurations.deployerJars
-        repository(url: rootProject.mavenRepoUrl) {
-            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
-        }
-        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
-            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
-        }
-
-        pom.project {
-            inceptionYear '2009'
-            name 'elasticsearch-plugins-cloud-aws'
-            description 'Cloud AWS Plugin for ElasticSearch'
-            licenses {
-                license {
-                    name 'The Apache Software License, Version 2.0'
-                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
-                    distribution 'repo'
-                }
-            }
-            scm {
-                connection 'git://github.com/elasticsearch/elasticsearch.git'
-                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
-                url 'http://github.com/elasticsearch/elasticsearch'
-            }
-        }
-
-        pom.whenConfigured {pom ->
-            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
-        }
-    }
-}
-
-eclipseClasspath {
-    defaultOutputDir = file('build/eclipse-build')
-}
@@ -1,2 +0,0 @@
-plugin=org.elasticsearch.plugin.cloud.aws.CloudAwsPlugin
-
@@ -1,143 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws;
-
-import com.amazonaws.ClientConfiguration;
-import com.amazonaws.Protocol;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.ec2.AmazonEC2;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import org.elasticsearch.ElasticSearchException;
-import org.elasticsearch.ElasticSearchIllegalArgumentException;
-import org.elasticsearch.cloud.aws.network.Ec2NameResolver;
-import org.elasticsearch.cloud.aws.node.Ec2CustomNodeAttributes;
-import org.elasticsearch.cluster.node.DiscoveryNodeService;
-import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.network.NetworkService;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AwsEc2Service extends AbstractLifecycleComponent<AwsEc2Service> {
-
-    public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/";
-
-    private AmazonEC2Client client;
-
-    @Inject public AwsEc2Service(Settings settings, SettingsFilter settingsFilter, NetworkService networkService, DiscoveryNodeService discoveryNodeService) {
-        super(settings);
-        settingsFilter.addFilter(new AwsSettingsFilter());
-        // add specific ec2 name resolver
-        networkService.addCustomNameResolver(new Ec2NameResolver(settings));
-        discoveryNodeService.addCustomAttributeProvider(new Ec2CustomNodeAttributes(settings));
-    }
-
-    public synchronized AmazonEC2 client() {
-        if (client != null) {
-            return client;
-        }
-
-        ClientConfiguration clientConfiguration = new ClientConfiguration();
-        String protocol = componentSettings.get("protocol", "http").toLowerCase();
-        if ("http".equals(protocol)) {
-            clientConfiguration.setProtocol(Protocol.HTTP);
-        } else if ("https".equals(protocol)) {
-            clientConfiguration.setProtocol(Protocol.HTTPS);
-        } else {
-            throw new ElasticSearchIllegalArgumentException("No protocol supported [" + protocol + "], can either be [http] or [https]");
-        }
-        String account = componentSettings.get("access_key", settings.get("cloud.account"));
-        String key = componentSettings.get("secret_key", settings.get("cloud.key"));
-
-        if (account == null) {
-            throw new ElasticSearchIllegalArgumentException("No aws access_key defined for ec2 discovery");
-        }
-        if (key == null) {
-            throw new ElasticSearchIllegalArgumentException("No aws secret_key defined for ec2 discovery");
-        }
-
-        String proxyHost = componentSettings.get("proxy_host");
-        if (proxyHost != null) {
-            String portString = componentSettings.get("proxy_port", "80");
-            Integer proxyPort;
-            try {
-                proxyPort = Integer.parseInt(portString, 10);
-            } catch (NumberFormatException ex) {
-                throw new ElasticSearchIllegalArgumentException("The configured proxy port value [" + portString + "] is invalid", ex);
-            }
-            clientConfiguration.withProxyHost(proxyHost).setProxyPort(proxyPort);
-        }
-
-        this.client = new AmazonEC2Client(new BasicAWSCredentials(account, key), clientConfiguration);
-
-        if (componentSettings.get("ec2.endpoint") != null) {
-            client.setEndpoint(componentSettings.get("ec2.endpoint"));
-        } else if (componentSettings.get("region") != null) {
-            String endpoint;
-            String region = componentSettings.get("region");
-            if ("us-east".equals(region.toLowerCase())) {
-                endpoint = "ec2.us-east-1.amazonaws.com";
-            } else if ("us-east-1".equals(region.toLowerCase())) {
-                endpoint = "ec2.us-east-1.amazonaws.com";
-            } else if ("us-west".equals(region.toLowerCase())) {
-                endpoint = "ec2.us-west-1.amazonaws.com";
-            } else if ("us-west-1".equals(region.toLowerCase())) {
-                endpoint = "ec2.us-west-1.amazonaws.com";
-            } else if ("us-west-2".equals(region.toLowerCase())) {
-                endpoint = "ec2.us-west-2.amazonaws.com";
-            } else if ("ap-southeast".equals(region.toLowerCase())) {
-                endpoint = "ec2.ap-southeast-1.amazonaws.com";
-            } else if ("ap-southeast-1".equals(region.toLowerCase())) {
-                endpoint = "ec2.ap-southeast-1.amazonaws.com";
-            } else if ("ap-northeast".equals(region.toLowerCase())) {
-                endpoint = "ec2.ap-northeast-1.amazonaws.com";
-            } else if ("ap-northeast-1".equals(region.toLowerCase())) {
-                endpoint = "ec2.ap-northeast-1.amazonaws.com";
-            } else if ("eu-west".equals(region.toLowerCase())) {
-                endpoint = "ec2.eu-west-1.amazonaws.com";
-            } else if ("eu-west-1".equals(region.toLowerCase())) {
-                endpoint = "ec2.eu-west-1.amazonaws.com";
-            } else {
-                throw new ElasticSearchIllegalArgumentException("No automatic endpoint could be derived from region [" + region + "]");
-            }
-            if (endpoint != null) {
-                client.setEndpoint(endpoint);
-            }
-        }
-
-        return this.client;
-    }
-
-    @Override protected void doStart() throws ElasticSearchException {
-    }
-
-    @Override protected void doStop() throws ElasticSearchException {
-    }
-
-    @Override protected void doClose() throws ElasticSearchException {
-        if (client != null) {
-            client.shutdown();
-        }
-    }
-}
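The componentSettings lookups in the service above resolve under the cloud.aws. prefix (the same keys AwsSettingsFilter hides from filtered output), with cloud.account/cloud.key as fallbacks. A minimal sketch of the settings it reads, assuming that prefix; the credential values are placeholders:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class Ec2ServiceSettingsSketch {
    public static void main(String[] args) {
        Settings settings = ImmutableSettings.settingsBuilder()
                .put("cloud.aws.access_key", "AKIA................") // placeholder credential
                .put("cloud.aws.secret_key", "........")             // placeholder credential
                .put("cloud.aws.region", "eu-west-1") // mapped to ec2.eu-west-1.amazonaws.com
                .put("cloud.aws.protocol", "https")   // default is "http"
                .build();
    }
}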
@@ -1,33 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws;
-
-import org.elasticsearch.common.inject.AbstractModule;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AwsModule extends AbstractModule {
-
-    @Override protected void configure() {
-        bind(AwsS3Service.class).asEagerSingleton();
-        bind(AwsEc2Service.class).asEagerSingleton();
-    }
-}
@@ -1,135 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws;
-
-import com.amazonaws.ClientConfiguration;
-import com.amazonaws.Protocol;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-import org.elasticsearch.ElasticSearchException;
-import org.elasticsearch.ElasticSearchIllegalArgumentException;
-import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AwsS3Service extends AbstractLifecycleComponent<AwsS3Service> {
-
-    private AmazonS3Client client;
-
-    @Inject public AwsS3Service(Settings settings, SettingsFilter settingsFilter) {
-        super(settings);
-
-        settingsFilter.addFilter(new AwsSettingsFilter());
-    }
-
-    public synchronized AmazonS3 client() {
-        if (client != null) {
-            return client;
-        }
-
-        ClientConfiguration clientConfiguration = new ClientConfiguration();
-        String protocol = componentSettings.get("protocol", "http").toLowerCase();
-        if ("http".equals(protocol)) {
-            clientConfiguration.setProtocol(Protocol.HTTP);
-        } else if ("https".equals(protocol)) {
-            clientConfiguration.setProtocol(Protocol.HTTPS);
-        } else {
-            throw new ElasticSearchIllegalArgumentException("No protocol supported [" + protocol + "], can either be [http] or [https]");
-        }
-        String account = componentSettings.get("access_key", settings.get("cloud.account"));
-        String key = componentSettings.get("secret_key", settings.get("cloud.key"));
-
-        if (account == null) {
-            throw new ElasticSearchIllegalArgumentException("No s3 access_key defined for s3 gateway");
-        }
-        if (key == null) {
-            throw new ElasticSearchIllegalArgumentException("No s3 secret_key defined for s3 gateway");
-        }
-
-        String proxyHost = componentSettings.get("proxy_host");
-        if (proxyHost != null) {
-            String portString = componentSettings.get("proxy_port", "80");
-            Integer proxyPort;
-            try {
-                proxyPort = Integer.parseInt(portString, 10);
-            } catch (NumberFormatException ex) {
-                throw new ElasticSearchIllegalArgumentException("The configured proxy port value [" + portString + "] is invalid", ex);
-            }
-            clientConfiguration.withProxyHost(proxyHost).setProxyPort(proxyPort);
-        }
-
-        this.client = new AmazonS3Client(new BasicAWSCredentials(account, key), clientConfiguration);
-
-        if (componentSettings.get("s3.endpoint") != null) {
-            client.setEndpoint(componentSettings.get("s3.endpoint"));
-        } else if (componentSettings.get("region") != null) {
-            String endpoint;
-            String region = componentSettings.get("region");
-            if ("us-east".equals(region.toLowerCase())) {
-                endpoint = "s3.amazonaws.com";
-            } else if ("us-east-1".equals(region.toLowerCase())) {
-                endpoint = "s3.amazonaws.com";
-            } else if ("us-west".equals(region.toLowerCase())) {
-                endpoint = "s3-us-west-1.amazonaws.com";
-            } else if ("us-west-1".equals(region.toLowerCase())) {
-                endpoint = "s3-us-west-1.amazonaws.com";
-            } else if ("us-west-2".equals(region.toLowerCase())) {
-                endpoint = "s3-us-west-2.amazonaws.com";
-            } else if ("ap-southeast".equals(region.toLowerCase())) {
-                endpoint = "s3-ap-southeast-1.amazonaws.com";
-            } else if ("ap-southeast-1".equals(region.toLowerCase())) {
-                endpoint = "s3-ap-southeast-1.amazonaws.com";
-            } else if ("ap-northeast".equals(region.toLowerCase())) {
-                endpoint = "s3-ap-northeast-1.amazonaws.com";
-            } else if ("ap-northeast-1".equals(region.toLowerCase())) {
-                endpoint = "s3-ap-northeast-1.amazonaws.com";
-            } else if ("eu-west".equals(region.toLowerCase())) {
-                endpoint = "s3-eu-west-1.amazonaws.com";
-            } else if ("eu-west-1".equals(region.toLowerCase())) {
-                endpoint = "s3-eu-west-1.amazonaws.com";
-            } else {
-                throw new ElasticSearchIllegalArgumentException("No automatic endpoint could be derived from region [" + region + "]");
-            }
-            if (endpoint != null) {
-                client.setEndpoint(endpoint);
-            }
-        }
-
-        return this.client;
-    }
-
-    @Override protected void doStart() throws ElasticSearchException {
-    }
-
-    @Override protected void doStop() throws ElasticSearchException {
-    }
-
-    @Override protected void doClose() throws ElasticSearchException {
-        if (client != null) {
-            client.shutdown();
-        }
-    }
-
-}
@@ -1,36 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws;
-
-import org.elasticsearch.common.settings.ImmutableSettings;
-import org.elasticsearch.common.settings.SettingsFilter;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AwsSettingsFilter implements SettingsFilter.Filter {
-
-    @Override public void filter(ImmutableSettings.Builder settings) {
-        settings.remove("cloud.key");
-        settings.remove("cloud.account");
-        settings.remove("cloud.aws.access_key");
-        settings.remove("cloud.aws.secret_key");
-    }
-}
@@ -1,128 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws.blobstore;
-
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.blobstore.BlobMetaData;
-import org.elasticsearch.common.blobstore.BlobPath;
-import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
-import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
-import org.elasticsearch.common.collect.ImmutableMap;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class AbstractS3BlobContainer extends AbstractBlobContainer {
-
-    protected final S3BlobStore blobStore;
-
-    protected final String keyPath;
-
-    public AbstractS3BlobContainer(BlobPath path, S3BlobStore blobStore) {
-        super(path);
-        this.blobStore = blobStore;
-        this.keyPath = path.buildAsString("/") + "/";
-    }
-
-    @Override public boolean blobExists(String blobName) {
-        try {
-            blobStore.client().getObjectMetadata(blobStore.bucket(), buildKey(blobName));
-            return true;
-        } catch (Exception e) {
-            return false;
-        }
-    }
-
-    @Override public boolean deleteBlob(String blobName) throws IOException {
-        blobStore.client().deleteObject(blobStore.bucket(), buildKey(blobName));
-        return true;
-    }
-
-    @Override public void readBlob(final String blobName, final ReadBlobListener listener) {
-        blobStore.executor().execute(new Runnable() {
-            @Override public void run() {
-                InputStream is;
-                try {
-                    S3Object object = blobStore.client().getObject(blobStore.bucket(), buildKey(blobName));
-                    is = object.getObjectContent();
-                } catch (Exception e) {
-                    listener.onFailure(e);
-                    return;
-                }
-                byte[] buffer = new byte[blobStore.bufferSizeInBytes()];
-                try {
-                    int bytesRead;
-                    while ((bytesRead = is.read(buffer)) != -1) {
-                        listener.onPartial(buffer, 0, bytesRead);
-                    }
-                    listener.onCompleted();
-                } catch (Exception e) {
-                    try {
-                        is.close();
-                    } catch (IOException e1) {
-                        // ignore
-                    }
-                    listener.onFailure(e);
-                }
-            }
-        });
-    }
-
-    @Override public ImmutableMap<String, BlobMetaData> listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException {
-        ImmutableMap.Builder<String, BlobMetaData> blobsBuilder = ImmutableMap.builder();
-        ObjectListing prevListing = null;
-        while (true) {
-            ObjectListing list;
-            if (prevListing != null) {
-                list = blobStore.client().listNextBatchOfObjects(prevListing);
-            } else {
-                if (blobNamePrefix != null) {
-                    list = blobStore.client().listObjects(blobStore.bucket(), buildKey(blobNamePrefix));
-                } else {
-                    list = blobStore.client().listObjects(blobStore.bucket(), keyPath);
-                }
-            }
-            for (S3ObjectSummary summary : list.getObjectSummaries()) {
-                String name = summary.getKey().substring(keyPath.length());
-                blobsBuilder.put(name, new PlainBlobMetaData(name, summary.getSize()));
-            }
-            if (list.isTruncated()) {
-                prevListing = list;
-            } else {
-                break;
-            }
-        }
-        return blobsBuilder.build();
-    }
-
-    @Override public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
-        return listBlobsByPrefix(null);
-    }
-
-    protected String buildKey(String blobName) {
-        return keyPath + blobName;
-    }
-}
@@ -1,115 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws.blobstore;
-
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.blobstore.BlobPath;
-import org.elasticsearch.common.blobstore.BlobStore;
-import org.elasticsearch.common.blobstore.ImmutableBlobContainer;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.ByteSizeUnit;
-import org.elasticsearch.common.unit.ByteSizeValue;
-
-import java.util.concurrent.Executor;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class S3BlobStore extends AbstractComponent implements BlobStore {
-
-    private final AmazonS3 client;
-
-    private final String bucket;
-
-    private final String region;
-
-    private final Executor executor;
-
-    private final int bufferSizeInBytes;
-
-    public S3BlobStore(Settings settings, AmazonS3 client, String bucket, @Nullable String region, Executor executor) {
-        super(settings);
-        this.client = client;
-        this.bucket = bucket;
-        this.region = region;
-        this.executor = executor;
-
-        this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
-
-        if (!client.doesBucketExist(bucket)) {
-            if (region != null) {
-                client.createBucket(bucket, region);
-            } else {
-                client.createBucket(bucket);
-            }
-        }
-    }
-
-    @Override public String toString() {
-        return (region == null ? "" : region + "/") + bucket;
-    }
-
-    public AmazonS3 client() {
-        return client;
-    }
-
-    public String bucket() {
-        return bucket;
-    }
-
-    public Executor executor() {
-        return executor;
-    }
-
-    public int bufferSizeInBytes() {
-        return bufferSizeInBytes;
-    }
-
-    @Override public ImmutableBlobContainer immutableBlobContainer(BlobPath path) {
-        return new S3ImmutableBlobContainer(path, this);
-    }
-
-    @Override public void delete(BlobPath path) {
-        ObjectListing prevListing = null;
-        while (true) {
-            ObjectListing list;
-            if (prevListing != null) {
-                list = client.listNextBatchOfObjects(prevListing);
-            } else {
-                list = client.listObjects(bucket, path.buildAsString("/"));
-            }
-            for (S3ObjectSummary summary : list.getObjectSummaries()) {
-                client.deleteObject(summary.getBucketName(), summary.getKey());
-            }
-            if (list.isTruncated()) {
-                prevListing = list;
-            } else {
-                break;
-            }
-        }
-    }
-
-    @Override public void close() {
-    }
-}
@@ -1,58 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws.blobstore;
-
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.amazonaws.services.s3.model.PutObjectResult;
-import org.elasticsearch.common.blobstore.BlobPath;
-import org.elasticsearch.common.blobstore.ImmutableBlobContainer;
-import org.elasticsearch.common.blobstore.support.BlobStores;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-/**
- * @author kimchy (shay.banon)
- */
-public class S3ImmutableBlobContainer extends AbstractS3BlobContainer implements ImmutableBlobContainer {
-
-    public S3ImmutableBlobContainer(BlobPath path, S3BlobStore blobStore) {
-        super(path, blobStore);
-    }
-
-    @Override public void writeBlob(final String blobName, final InputStream is, final long sizeInBytes, final WriterListener listener) {
-        blobStore.executor().execute(new Runnable() {
-            @Override public void run() {
-                try {
-                    ObjectMetadata md = new ObjectMetadata();
-                    md.setContentLength(sizeInBytes);
-                    PutObjectResult objectResult = blobStore.client().putObject(blobStore.bucket(), buildKey(blobName), is, md);
-                    listener.onCompleted();
-                } catch (Exception e) {
-                    listener.onFailure(e);
-                }
-            }
-        });
-    }
-
-    @Override public void writeBlob(String blobName, InputStream is, long sizeInBytes) throws IOException {
-        BlobStores.syncWriteBlob(this, blobName, is, sizeInBytes);
-    }
-}
@@ -1,138 +0,0 @@
-/*
- * Licensed to Elastic Search and Shay Banon under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Elastic Search licenses this
- * file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.cloud.aws.network;
-
-import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.cloud.aws.AwsEc2Service;
-import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.io.Closeables;
-import org.elasticsearch.common.network.NetworkService.CustomNameResolver;
-import org.elasticsearch.common.settings.Settings;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.InetAddress;
-import java.net.URL;
-import java.net.URLConnection;
-
-/**
- * Resolves certain ec2 related 'meta' hostnames into an actual hostname
- * obtained from ec2 meta-data.
- * <p />
- * Valid config values for {@link Ec2HostnameType}s are -
- * <ul>
- * <li>_ec2_ - maps to privateIpv4</li>
- * <li>_ec2:privateIp_ - maps to privateIpv4</li>
- * <li>_ec2:privateIpv4_</li>
- * <li>_ec2:privateDns_</li>
- * <li>_ec2:publicIp_ - maps to publicIpv4</li>
- * <li>_ec2:publicIpv4_</li>
- * <li>_ec2:publicDns_</li>
- * </ul>
- *
- * @author Paul_Loy (keteracel)
- */
-public class Ec2NameResolver extends AbstractComponent implements CustomNameResolver {
-
-    /**
-     * enum that can be added to over time with more meta-data types (such as ipv6 when this is available)
-     *
-     * @author Paul_Loy
-     */
-    private static enum Ec2HostnameType {
-
-        PRIVATE_IPv4("ec2:privateIpv4", "local-ipv4"),
-        PRIVATE_DNS("ec2:privateDns", "local-hostname"),
-        PUBLIC_IPv4("ec2:publicIpv4", "public-ipv4"),
-        PUBLIC_DNS("ec2:publicDns", "public-hostname"),
-
-        // some less verbose defaults
-        PUBLIC_IP("ec2:publicIp", PUBLIC_IPv4.ec2Name),
-        PRIVATE_IP("ec2:privateIp", PRIVATE_IPv4.ec2Name),
-        EC2("ec2", PRIVATE_IPv4.ec2Name);
-
-        final String configName;
-        final String ec2Name;
-
-        private Ec2HostnameType(String configName, String ec2Name) {
-            this.configName = configName;
-            this.ec2Name = ec2Name;
-        }
-    }
-
-    /**
-     * Construct a {@link CustomNameResolver}.
-     */
-    public Ec2NameResolver(Settings settings) {
-        super(settings);
-    }
-
-    /**
-     * @param type the ec2 hostname type to discover.
-     * @return the appropriate host resolved from ec2 meta-data.
-     * @throws IOException if ec2 meta-data cannot be obtained.
-     * @see CustomNameResolver#resolveIfPossible(String)
-     */
-    public InetAddress resolve(Ec2HostnameType type, boolean warnOnFailure) {
-        URLConnection urlConnection = null;
-        InputStream in = null;
-        try {
-            URL url = new URL(AwsEc2Service.EC2_METADATA_URL + type.ec2Name);
-            logger.debug("obtaining ec2 hostname from ec2 meta-data url {}", url);
-            urlConnection = url.openConnection();
-            urlConnection.setConnectTimeout(2000);
-            in = urlConnection.getInputStream();
-            BufferedReader urlReader = new BufferedReader(new InputStreamReader(in));
-
-            String metadataResult = urlReader.readLine();
-            if (metadataResult == null || metadataResult.length() == 0) {
-                logger.error("no ec2 metadata returned from {}", url);
-                return null;
-            }
-            return InetAddress.getByName(metadataResult);
-        } catch (IOException e) {
-            if (warnOnFailure) {
-                logger.warn("failed to get metadata for [" + type.configName + "]: " + ExceptionsHelper.detailedMessage(e));
-            } else {
-                logger.debug("failed to get metadata for [" + type.configName + "]: " + ExceptionsHelper.detailedMessage(e));
-            }
-            return null;
-        } finally {
-            Closeables.closeQuietly(in);
-        }
-    }
-
-    @Override public InetAddress resolveDefault() {
-        return null; // using this, one has to explicitly specify _ec2_ in network setting
-        // return resolve(Ec2HostnameType.DEFAULT, false);
-    }
-
-    @Override public InetAddress resolveIfPossible(String value) {
-        for (Ec2HostnameType type : Ec2HostnameType.values()) {
-            if (type.configName.equals(value)) {
-                return resolve(type, true);
-            }
-        }
-        return null;
|
||||
}
|
||||
|
||||
}
|
|
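For orientation, a hypothetical caller of the resolver above. In the node configuration the value would be written with surrounding underscores (for example network.host: _ec2:privateIpv4_), which are assumed to be stripped by the NetworkService before the bare name reaches resolveIfPossible:

import java.net.InetAddress;

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class Ec2NameResolverExample {
    public static InetAddress resolvePrivateIp() {
        Settings settings = ImmutableSettings.settingsBuilder().build();
        Ec2NameResolver resolver = new Ec2NameResolver(settings);
        // null means either the name is not an ec2 name or the meta-data service was unreachable
        return resolver.resolveIfPossible("ec2:privateIpv4");
    }
}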
@@ -1,76 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cloud.aws.node;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.cluster.node.DiscoveryNodeService;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Closeables;
import org.elasticsearch.common.settings.Settings;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Map;

/**
 * Adds an aws_availability_zone attribute to the node, read from the ec2 meta-data
 * service, when cloud.node.auto_attributes is enabled.
 */
public class Ec2CustomNodeAttributes extends AbstractComponent implements DiscoveryNodeService.CustomAttributesProvider {

    public Ec2CustomNodeAttributes(Settings settings) {
        super(settings);
    }

    @Override public Map<String, String> buildAttributes() {
        if (!settings.getAsBoolean("cloud.node.auto_attributes", false)) {
            return null;
        }
        Map<String, String> ec2Attributes = Maps.newHashMap();

        InputStream in = null;
        try {
            URL url = new URL(AwsEc2Service.EC2_METADATA_URL + "placement/availability-zone");
            logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url);
            URLConnection urlConnection = url.openConnection();
            urlConnection.setConnectTimeout(2000);
            in = urlConnection.getInputStream();
            BufferedReader urlReader = new BufferedReader(new InputStreamReader(in));

            String metadataResult = urlReader.readLine();
            if (metadataResult == null || metadataResult.length() == 0) {
                logger.error("no ec2 metadata returned from {}", url);
                return null;
            }
            ec2Attributes.put("aws_availability_zone", metadataResult);
        } catch (IOException e) {
            logger.debug("failed to get metadata for [placement/availability-zone]: " + ExceptionsHelper.detailedMessage(e));
        } finally {
            Closeables.closeQuietly(in);
        }

        return ec2Attributes;
    }
}
@@ -1,195 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.ec2;

import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
import com.amazonaws.services.ec2.model.DescribeInstancesResult;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.InstanceState;
import com.amazonaws.services.ec2.model.Reservation;
import com.amazonaws.services.ec2.model.Tag;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.ImmutableSet;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.collect.Sets;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider;
import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;
import org.elasticsearch.transport.TransportService;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * @author kimchy (shay.banon)
 */
public class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHostsProvider {

    private static enum HostType {
        PRIVATE_IP,
        PUBLIC_IP,
        PRIVATE_DNS,
        PUBLIC_DNS
    }

    private final TransportService transportService;

    private final AmazonEC2 client;

    private final boolean bindAnyGroup;

    private final ImmutableSet<String> groups;

    private final ImmutableMap<String, String> tags;

    private final ImmutableSet<String> availabilityZones;

    private final HostType hostType;

    @Inject public AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AmazonEC2 client) {
        super(settings);
        this.transportService = transportService;
        this.client = client;

        this.hostType = HostType.valueOf(componentSettings.get("host_type", "private_ip").toUpperCase());

        this.bindAnyGroup = componentSettings.getAsBoolean("any_group", true);
        this.groups = ImmutableSet.copyOf(componentSettings.getAsArray("groups"));

        this.tags = componentSettings.getByPrefix("tag.").getAsMap();

        Set<String> availabilityZones = Sets.newHashSet(componentSettings.getAsArray("availability_zones"));
        if (componentSettings.get("availability_zones") != null) {
            availabilityZones.addAll(Strings.commaDelimitedListToSet(componentSettings.get("availability_zones")));
        }
        this.availabilityZones = ImmutableSet.copyOf(availabilityZones);

        if (logger.isDebugEnabled()) {
            logger.debug("using host_type [{}], tags [{}], groups [{}] with any_group [{}], availability_zones [{}]", hostType, tags, groups, bindAnyGroup, availabilityZones);
        }
    }

    @Override public List<DiscoveryNode> buildDynamicNodes() {
        List<DiscoveryNode> discoNodes = Lists.newArrayList();

        DescribeInstancesResult descInstances = client.describeInstances(new DescribeInstancesRequest());

        logger.trace("building dynamic unicast discovery nodes...");
        for (Reservation reservation : descInstances.getReservations()) {
            if (!groups.isEmpty()) {
                // lets see if we can filter based on groups
                List<String> groupNames = reservation.getGroupNames();
                if (bindAnyGroup) {
                    // with any_group, the reservation must share at least one group with ours
                    if (Collections.disjoint(groups, groupNames)) {
                        logger.trace("filtering out reservation {} based on groups {}, not part of {}", reservation.getReservationId(), groupNames, groups);
                        // continue to the next reservation
                        continue;
                    }
                } else {
                    // without any_group, the reservation must be in every configured group
                    if (!groupNames.containsAll(groups)) {
                        logger.trace("filtering out reservation {} based on groups {}, does not include all of {}", reservation.getReservationId(), groupNames, groups);
                        // continue to the next reservation
                        continue;
                    }
                }
            }

            for (Instance instance : reservation.getInstances()) {
                if (!availabilityZones.isEmpty()) {
                    if (!availabilityZones.contains(instance.getPlacement().getAvailabilityZone())) {
                        logger.trace("filtering out instance {} based on availability_zone {}, not part of {}", instance.getInstanceId(), instance.getPlacement().getAvailabilityZone(), availabilityZones);
                        continue;
                    }
                }
                // see if we need to filter by tags
                boolean filterByTag = false;
                if (!tags.isEmpty()) {
                    if (instance.getTags() == null) {
                        filterByTag = true;
                    } else {
                        // check that all tags listed are there on the instance
                        for (Map.Entry<String, String> entry : tags.entrySet()) {
                            boolean found = false;
                            for (Tag tag : instance.getTags()) {
                                if (entry.getKey().equals(tag.getKey()) && entry.getValue().equals(tag.getValue())) {
                                    found = true;
                                    break;
                                }
                            }
                            if (!found) {
                                filterByTag = true;
                                break;
                            }
                        }
                    }
                }
                if (filterByTag) {
                    logger.trace("filtering out instance {} based on tags {}, not part of {}", instance.getInstanceId(), tags, instance.getTags());
                    continue;
                }

                InstanceState state = instance.getState();
                if (state.getName().equalsIgnoreCase("pending") || state.getName().equalsIgnoreCase("running")) {
                    String address = null;
                    switch (hostType) {
                        case PRIVATE_DNS:
                            address = instance.getPrivateDnsName();
                            break;
                        case PRIVATE_IP:
                            address = instance.getPrivateIpAddress();
                            break;
                        case PUBLIC_DNS:
                            address = instance.getPublicDnsName();
                            break;
                        case PUBLIC_IP:
                            address = instance.getPublicIpAddress();
                            break;
                    }
                    if (address != null) {
                        try {
                            TransportAddress[] addresses = transportService.addressesFromString(address);
                            // we limit this to a single address; it makes no sense to ping a whole port range
                            for (int i = 0; (i < addresses.length && i < UnicastZenPing.LIMIT_PORTS_COUNT); i++) {
                                logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]);
                                discoNodes.add(new DiscoveryNode("#cloud-" + instance.getInstanceId() + "-" + i, addresses[i]));
                            }
                        } catch (Exception e) {
                            logger.warn("failed to add {}, address {}", e, instance.getInstanceId(), address);
                        }
                    } else {
                        logger.trace("not adding {}, address is null, host_type {}", instance.getInstanceId(), hostType);
                    }
                }
            }
        }

        logger.debug("using dynamic discovery nodes {}", discoNodes);

        return discoNodes;
    }
}
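The constructor above reads its filters from component settings. A hypothetical settings block that would exercise all of them (the "discovery.ec2." prefix is an assumption about the component's settings prefix, not something shown in this diff; the key names are the ones read by the constructor):

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class Ec2DiscoverySettingsExample {
    // Hypothetical filter configuration; key prefix assumed.
    public static Settings example() {
        return ImmutableSettings.settingsBuilder()
                .put("discovery.ec2.host_type", "private_ip")      // -> HostType.PRIVATE_IP
                .put("discovery.ec2.any_group", false)             // require ALL of the groups below
                .putArray("discovery.ec2.groups", "elasticsearch")
                .put("discovery.ec2.tag.stage", "production")      // instance must carry this exact tag
                .putArray("discovery.ec2.availability_zones", "us-east-1a", "us-east-1b")
                .build();
    }
}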
@@ -1,66 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.ec2;

import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNodeService;
import org.elasticsearch.common.collect.ImmutableList;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.discovery.zen.ping.ZenPing;
import org.elasticsearch.discovery.zen.ping.ZenPingService;
import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

/**
 * @author kimchy (shay.banon)
 */
public class Ec2Discovery extends ZenDiscovery {

    @Inject public Ec2Discovery(Settings settings, ClusterName clusterName, ThreadPool threadPool, TransportService transportService,
                                ClusterService clusterService, NodeSettingsService nodeSettingsService, ZenPingService pingService,
                                DiscoveryNodeService discoveryNodeService, AwsEc2Service ec2Service) {
        super(settings, clusterName, threadPool, transportService, clusterService, nodeSettingsService, discoveryNodeService, pingService);
        if (settings.getAsBoolean("cloud.enabled", true)) {
            ImmutableList<? extends ZenPing> zenPings = pingService.zenPings();
            UnicastZenPing unicastZenPing = null;
            for (ZenPing zenPing : zenPings) {
                if (zenPing instanceof UnicastZenPing) {
                    unicastZenPing = (UnicastZenPing) zenPing;
                    break;
                }
            }

            if (unicastZenPing != null) {
                // update the unicast zen ping to add the cloud hosts provider
                // and, while we are at it, use only unicast pinging (dropping multicast, for example)
                unicastZenPing.addHostsProvider(new AwsEc2UnicastHostsProvider(settings, transportService, ec2Service.client()));
                pingService.zenPings(ImmutableList.of(unicastZenPing));
            } else {
                logger.warn("failed to apply ec2 unicast discovery, no unicast ping found");
            }
        }
    }
}
@@ -1,33 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.ec2;

import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.zen.ZenDiscoveryModule;

/**
 * @author kimchy (shay.banon)
 */
public class Ec2DiscoveryModule extends ZenDiscoveryModule {

    @Override protected void bindDiscovery() {
        bind(Discovery.class).to(Ec2Discovery.class).asEagerSingleton();
    }
}
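Taken together, the two classes above replace the default zen discovery wiring. A hypothetical node configuration selecting them; the discovery.type key taking a module class name is an assumption about how discovery modules were selected at the time, not shown in this diff:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class Ec2DiscoveryExample {
    public static Settings example() {
        // Hypothetical: point discovery at the ec2 module and keep cloud support on.
        return ImmutableSettings.settingsBuilder()
                .put("discovery.type", "org.elasticsearch.discovery.ec2.Ec2DiscoveryModule")
                .put("cloud.enabled", true) // read by Ec2Discovery above
                .build();
    }
}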
@@ -1,104 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway.s3;

import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.cloud.aws.AwsS3Service;
import org.elasticsearch.cloud.aws.blobstore.S3BlobStore;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.DynamicExecutors;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.gateway.blobstore.BlobStoreGateway;
import org.elasticsearch.index.gateway.s3.S3IndexGatewayModule;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.util.concurrent.ExecutorService;

/**
 * @author kimchy (shay.banon)
 */
public class S3Gateway extends BlobStoreGateway {

    private final ExecutorService concurrentStreamPool;

    @Inject public S3Gateway(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                             ClusterName clusterName, AwsS3Service s3Service) throws IOException {
        super(settings, threadPool, clusterService);

        String bucket = componentSettings.get("bucket");
        if (bucket == null) {
            throw new ElasticSearchIllegalArgumentException("No bucket defined for s3 gateway");
        }

        // map the friendly cloud.aws.region names onto S3 location constraints;
        // us-east maps to the default (null) location
        String region = componentSettings.get("region");
        if (region == null && settings.get("cloud.aws.region") != null) {
            String regionSetting = settings.get("cloud.aws.region").toLowerCase();
            if ("us-east".equals(regionSetting) || "us-east-1".equals(regionSetting)) {
                region = null;
            } else if ("us-west".equals(regionSetting) || "us-west-1".equals(regionSetting)) {
                region = "us-west-1";
            } else if ("ap-southeast".equals(regionSetting) || "ap-southeast-1".equals(regionSetting)) {
                region = "ap-southeast-1";
            } else if ("eu-west".equals(regionSetting) || "eu-west-1".equals(regionSetting)) {
                region = "EU";
            }
        }
        ByteSizeValue chunkSize = componentSettings.getAsBytesSize("chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB));

        int concurrentStreams = componentSettings.getAsInt("concurrent_streams", 5);
        this.concurrentStreamPool = DynamicExecutors.newScalingThreadPool(1, concurrentStreams, TimeValue.timeValueSeconds(5).millis(), EsExecutors.daemonThreadFactory(settings, "[s3_stream]"));

        logger.debug("using bucket [{}], region [{}], chunk_size [{}], concurrent_streams [{}]", bucket, region, chunkSize, concurrentStreams);

        initialize(new S3BlobStore(settings, s3Service.client(), bucket, region, concurrentStreamPool), clusterName, chunkSize);
    }

    @Override protected void doClose() throws ElasticSearchException {
        super.doClose();
        concurrentStreamPool.shutdown();
    }

    @Override public String type() {
        return "s3";
    }

    @Override public Class<? extends Module> suggestIndexGateway() {
        return S3IndexGatewayModule.class;
    }
}
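The keys read by the constructor above (bucket, region, chunk_size, concurrent_streams) are component settings. A hypothetical node configuration; the "gateway.s3." prefix is inferred from the component's name and the "gateway.type" selection key is an assumption, neither is shown in this hunk:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class S3GatewayExample {
    public static Settings example() {
        return ImmutableSettings.settingsBuilder()
                .put("gateway.type", "s3")
                .put("gateway.s3.bucket", "my-cluster-gateway") // required, else an exception is thrown
                .put("cloud.aws.region", "us-west-1")           // mapped onto an S3 location above
                .put("gateway.s3.chunk_size", "100mb")          // the default shown in the constructor
                .put("gateway.s3.concurrent_streams", 5)
                .build();
    }
}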
@@ -1,33 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway.s3;

import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.gateway.blobstore.BlobStoreGatewayModule;

/**
 * @author kimchy (shay.banon)
 */
public class S3GatewayModule extends BlobStoreGatewayModule {

    @Override protected void configure() {
        bind(Gateway.class).to(S3Gateway.class).asEagerSingleton();
    }
}
@@ -1,47 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.gateway.s3;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.gateway.IndexShardGateway;
import org.elasticsearch.index.gateway.blobstore.BlobStoreIndexGateway;
import org.elasticsearch.index.settings.IndexSettings;

/**
 * @author kimchy (shay.banon)
 */
public class S3IndexGateway extends BlobStoreIndexGateway {

    @Inject public S3IndexGateway(Index index, @IndexSettings Settings indexSettings, Gateway gateway) {
        super(index, indexSettings, gateway);
    }

    @Override public String type() {
        return "s3";
    }

    @Override public Class<? extends IndexShardGateway> shardGatewayClass() {
        return S3IndexShardGateway.class;
    }
}
@@ -1,33 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.gateway.s3;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.gateway.IndexGateway;

/**
 * @author kimchy (shay.banon)
 */
public class S3IndexGatewayModule extends AbstractModule {

    @Override protected void configure() {
        bind(IndexGateway.class).to(S3IndexGateway.class).asEagerSingleton();
    }
}
@@ -1,46 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.gateway.s3;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.gateway.IndexGateway;
import org.elasticsearch.index.gateway.blobstore.BlobStoreIndexShardGateway;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.threadpool.ThreadPool;

/**
 * @author kimchy (shay.banon)
 */
public class S3IndexShardGateway extends BlobStoreIndexShardGateway {

    @Inject public S3IndexShardGateway(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool, IndexGateway indexGateway,
                                       IndexShard indexShard, Store store) {
        super(shardId, indexSettings, threadPool, indexGateway, indexShard, store);
    }

    @Override public String type() {
        return "s3";
    }
}
@@ -1,68 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.cloud.aws;

import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.cloud.aws.AwsModule;
import org.elasticsearch.cloud.aws.AwsS3Service;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.AbstractPlugin;

import java.util.Collection;

/**
 * @author kimchy (shay.banon)
 */
public class CloudAwsPlugin extends AbstractPlugin {

    private final Settings settings;

    public CloudAwsPlugin(Settings settings) {
        this.settings = settings;
    }

    @Override public String name() {
        return "cloud-aws";
    }

    @Override public String description() {
        return "Cloud AWS Plugin";
    }

    @Override public Collection<Class<? extends Module>> modules() {
        Collection<Class<? extends Module>> modules = Lists.newArrayList();
        if (settings.getAsBoolean("cloud.enabled", true)) {
            modules.add(AwsModule.class);
        }
        return modules;
    }

    @Override public Collection<Class<? extends LifecycleComponent>> services() {
        Collection<Class<? extends LifecycleComponent>> services = Lists.newArrayList();
        if (settings.getAsBoolean("cloud.enabled", true)) {
            services.add(AwsS3Service.class);
            services.add(AwsEc2Service.class);
        }
        return services;
    }
}
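The plugin only installs its module and services when cloud.enabled is true (the default). The AWS credentials themselves are read elsewhere (AwsModule and the services, not shown in this hunk), so the credential key names below are an assumption:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class CloudAwsPluginExample {
    public static Settings example() {
        return ImmutableSettings.settingsBuilder()
                .put("cloud.enabled", true)           // gates modules() and services() above
                .put("cloud.aws.access_key", "AK...") // assumed key name, consumed by the aws services
                .put("cloud.aws.secret_key", "...")   // assumed key name
                .build();
    }
}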
@@ -1,138 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'

archivesBaseName = "elasticsearch-hadoop"

explodedDistDir = new File(distsDir, 'exploded')

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'

// add the source files to the dist jar
//jar {
//    from sourceSets.main.allJava
//}

configurations {
    dists
    distLib {
        visible = false
    }
}

dependencies {
    compile project(':elasticsearch')
    compile("org.apache.hadoop:hadoop-core:0.20.2") { transitive = false }
    runtime("commons-logging:commons-logging:1.1.1") { transitive = false }

    distLib("org.apache.hadoop:hadoop-core:0.20.2") { transitive = false }
    distLib("commons-logging:commons-logging:1.1.1") { transitive = false }
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch files (the 'compile project' dependency above pulls the elasticsearch jar in)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

jar {
    // from sourceSets.main.allJava
    manifest {
        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
    }
}

artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-hadoop'
            description 'Hadoop Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured { pom ->
            pom.dependencies = pom.dependencies.findAll { dep -> dep.scope != 'test' } // removes the test scoped ones
        }
    }
}

eclipseClasspath {
    defaultOutputDir = file('build/eclipse-build')
}
@@ -1 +0,0 @@
plugin=org.elasticsearch.plugin.hadoop.HadoopPlugin
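This one-line descriptor is how a node finds the plugin's entry point: the plugin loader scans the classpath for es-plugin.properties files and instantiates the class named by the plugin key. A simplified sketch of that lookup (the real PluginsService logic is not part of this diff, so the shape of this code is an assumption):

import java.io.InputStream;
import java.util.Properties;

public class PluginDescriptorSketch {
    // Simplified sketch; classpath scanning and error handling are omitted.
    public static Object loadPlugin(ClassLoader loader) throws Exception {
        Properties props = new Properties();
        InputStream in = loader.getResourceAsStream("es-plugin.properties");
        try {
            props.load(in);
        } finally {
            in.close();
        }
        String className = props.getProperty("plugin"); // e.g. org.elasticsearch.plugin.hadoop.HadoopPlugin
        return loader.loadClass(className).newInstance();
    }
}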
@@ -1,119 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.blobstore.hdfs;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.ImmutableMap;

import java.io.IOException;

/**
 * @author kimchy (shay.banon)
 */
public abstract class AbstractHdfsBlobContainer extends AbstractBlobContainer {

    protected final HdfsBlobStore blobStore;

    protected final Path path;

    public AbstractHdfsBlobContainer(HdfsBlobStore blobStore, BlobPath blobPath, Path path) {
        super(blobPath);
        this.blobStore = blobStore;
        this.path = path;
    }

    @Override public ImmutableMap<String, BlobMetaData> listBlobs() throws IOException {
        FileStatus[] files = blobStore.fileSystem().listStatus(path);
        if (files == null || files.length == 0) {
            return ImmutableMap.of();
        }
        ImmutableMap.Builder<String, BlobMetaData> builder = ImmutableMap.builder();
        for (FileStatus file : files) {
            builder.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen()));
        }
        return builder.build();
    }

    @Override public ImmutableMap<String, BlobMetaData> listBlobsByPrefix(final String blobNamePrefix) throws IOException {
        FileStatus[] files = blobStore.fileSystem().listStatus(path, new PathFilter() {
            @Override public boolean accept(Path path) {
                return path.getName().startsWith(blobNamePrefix);
            }
        });
        if (files == null || files.length == 0) {
            return ImmutableMap.of();
        }
        ImmutableMap.Builder<String, BlobMetaData> builder = ImmutableMap.builder();
        for (FileStatus file : files) {
            builder.put(file.getPath().getName(), new PlainBlobMetaData(file.getPath().getName(), file.getLen()));
        }
        return builder.build();
    }

    public boolean deleteBlob(String blobName) throws IOException {
        return blobStore.fileSystem().delete(new Path(path, blobName), true);
    }

    @Override public boolean blobExists(String blobName) {
        try {
            return blobStore.fileSystem().exists(new Path(path, blobName));
        } catch (IOException e) {
            return false;
        }
    }

    @Override public void readBlob(final String blobName, final ReadBlobListener listener) {
        blobStore.executor().execute(new Runnable() {
            @Override public void run() {
                byte[] buffer = new byte[blobStore.bufferSizeInBytes()];

                FSDataInputStream fileStream;
                try {
                    fileStream = blobStore.fileSystem().open(new Path(path, blobName));
                } catch (IOException e) {
                    listener.onFailure(e);
                    return;
                }
                try {
                    int bytesRead;
                    while ((bytesRead = fileStream.read(buffer)) != -1) {
                        listener.onPartial(buffer, 0, bytesRead);
                    }
                    listener.onCompleted();
                } catch (Exception e) {
                    listener.onFailure(e);
                } finally {
                    // close the stream whether the read succeeded or failed
                    try {
                        fileStream.close();
                    } catch (IOException e1) {
                        // ignore
                    }
                }
            }
        });
    }
}
@@ -1,111 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.blobstore.hdfs;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.blobstore.ImmutableBlobContainer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.io.IOException;
import java.util.concurrent.Executor;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsBlobStore implements BlobStore {

    private final FileSystem fileSystem;

    private final Path path;

    private final Executor executor;

    private final int bufferSizeInBytes;

    public HdfsBlobStore(Settings settings, FileSystem fileSystem, Executor executor, Path path) throws IOException {
        this.fileSystem = fileSystem;
        this.path = path;

        if (!fileSystem.exists(path)) {
            fileSystem.mkdirs(path);
        }

        this.bufferSizeInBytes = (int) settings.getAsBytesSize("buffer_size", new ByteSizeValue(100, ByteSizeUnit.KB)).bytes();
        this.executor = executor;
    }

    @Override public String toString() {
        return path.toString();
    }

    public int bufferSizeInBytes() {
        return this.bufferSizeInBytes;
    }

    public FileSystem fileSystem() {
        return fileSystem;
    }

    public Path path() {
        return path;
    }

    public Executor executor() {
        return executor;
    }

    @Override public ImmutableBlobContainer immutableBlobContainer(BlobPath path) {
        return new HdfsImmutableBlobContainer(this, path, buildAndCreate(path));
    }

    @Override public void delete(BlobPath path) {
        try {
            fileSystem.delete(buildPath(path), true);
        } catch (IOException e) {
            // ignore
        }
    }

    @Override public void close() {
    }

    private Path buildAndCreate(BlobPath blobPath) {
        Path path = buildPath(blobPath);
        try {
            fileSystem.mkdirs(path);
        } catch (IOException e) {
            // ignore
        }
        return path;
    }

    private Path buildPath(BlobPath blobPath) {
        Path path = this.path;
        for (String p : blobPath) {
            path = new Path(path, p);
        }
        return path;
    }
}
@@ -1,90 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.blobstore.hdfs;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.ImmutableBlobContainer;
import org.elasticsearch.common.blobstore.support.BlobStores;

import java.io.IOException;
import java.io.InputStream;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsImmutableBlobContainer extends AbstractHdfsBlobContainer implements ImmutableBlobContainer {

    public HdfsImmutableBlobContainer(HdfsBlobStore blobStore, BlobPath blobPath, Path path) {
        super(blobStore, blobPath, path);
    }

    @Override public void writeBlob(final String blobName, final InputStream is, final long sizeInBytes, final WriterListener listener) {
        blobStore.executor().execute(new Runnable() {
            @Override public void run() {
                Path file = new Path(path, blobName);

                FSDataOutputStream fileStream;
                try {
                    fileStream = blobStore.fileSystem().create(file, true);
                } catch (IOException e) {
                    listener.onFailure(e);
                    return;
                }
                try {
                    try {
                        byte[] buffer = new byte[blobStore.bufferSizeInBytes()];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                            fileStream.write(buffer, 0, bytesRead);
                        }
                    } finally {
                        try {
                            is.close();
                        } catch (IOException ex) {
                            // do nothing
                        }
                        try {
                            fileStream.close();
                        } catch (IOException ex) {
                            // do nothing
                        }
                    }
                    listener.onCompleted();
                } catch (Exception e) {
                    // just to be on the safe side, try and delete the partially written file on failure
                    try {
                        if (blobStore.fileSystem().exists(file)) {
                            blobStore.fileSystem().delete(file, true);
                        }
                    } catch (Exception e1) {
                        // ignore
                    }
                    listener.onFailure(e);
                }
            }
        });
    }

    @Override public void writeBlob(String blobName, InputStream is, long sizeInBytes) throws IOException {
        BlobStores.syncWriteBlob(this, blobName, is, sizeInBytes);
    }
}
@@ -1,105 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.blobstore.hdfs.HdfsBlobStore;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.DynamicExecutors;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.gateway.blobstore.BlobStoreGateway;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.IOException;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.ExecutorService;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsGateway extends BlobStoreGateway {

    private final boolean closeFileSystem;

    private final FileSystem fileSystem;

    private final ExecutorService concurrentStreamPool;

    @Inject public HdfsGateway(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                               ClusterName clusterName) throws IOException {
        super(settings, threadPool, clusterService);

        this.closeFileSystem = componentSettings.getAsBoolean("close_fs", true);
        String uri = componentSettings.get("uri");
        if (uri == null) {
            throw new ElasticSearchIllegalArgumentException("hdfs gateway requires the 'uri' setting to be set");
        }
        String path = componentSettings.get("path");
        if (path == null) {
            throw new ElasticSearchIllegalArgumentException("hdfs gateway requires the 'path' setting to be set");
        }
        Path hPath = new Path(new Path(path), clusterName.value());

        int concurrentStreams = componentSettings.getAsInt("concurrent_streams", 5);
        this.concurrentStreamPool = DynamicExecutors.newScalingThreadPool(1, concurrentStreams, TimeValue.timeValueSeconds(5).millis(), EsExecutors.daemonThreadFactory(settings, "[hdfs_stream]"));

        logger.debug("using uri [{}], path [{}], concurrent_streams [{}]", uri, hPath, concurrentStreams);

        // pass any hdfs.conf.* settings straight through to the Hadoop configuration
        Configuration conf = new Configuration();
        Settings hdfsSettings = settings.getByPrefix("hdfs.conf.");
        for (Map.Entry<String, String> entry : hdfsSettings.getAsMap().entrySet()) {
            conf.set(entry.getKey(), entry.getValue());
        }

        fileSystem = FileSystem.get(URI.create(uri), conf);

        initialize(new HdfsBlobStore(settings, fileSystem, concurrentStreamPool, hPath), clusterName, null);
    }

    @Override public String type() {
        return "hdfs";
    }

    @Override public Class<? extends Module> suggestIndexGateway() {
        return HdfsIndexGatewayModule.class;
    }

    @Override protected void doClose() throws ElasticSearchException {
        super.doClose();
        if (closeFileSystem) {
            try {
                fileSystem.close();
            } catch (IOException e) {
                // ignore
            }
        }
        concurrentStreamPool.shutdown();
    }
}
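The keys read above (uri, path, concurrent_streams, close_fs) are component settings, and the hdfs.conf. pass-through is visible in the constructor. A hypothetical node configuration; the "gateway.hdfs." prefix and the "gateway.type" selection key are assumptions, not shown in this hunk:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

public class HdfsGatewayExample {
    public static Settings example() {
        return ImmutableSettings.settingsBuilder()
                .put("gateway.type", "hdfs")
                .put("gateway.hdfs.uri", "hdfs://namenode:8020")   // required, else an exception is thrown
                .put("gateway.hdfs.path", "/elasticsearch/gateway") // required; cluster name is appended
                .put("gateway.hdfs.concurrent_streams", 5)
                .put("hdfs.conf.dfs.replication", "2")             // copied verbatim into the Hadoop Configuration
                .build();
    }
}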
@@ -1,33 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway.hdfs;

import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.gateway.blobstore.BlobStoreGatewayModule;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsGatewayModule extends BlobStoreGatewayModule {

    @Override protected void configure() {
        bind(Gateway.class).to(HdfsGateway.class).asEagerSingleton();
    }
}
@@ -1,34 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gateway.hdfs;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.gateway.IndexGateway;
import org.elasticsearch.index.gateway.hdfs.HdfsIndexGateway;

/**
 * @author kimchy (Shay Banon)
 */
public class HdfsIndexGatewayModule extends AbstractModule {

    @Override protected void configure() {
        bind(IndexGateway.class).to(HdfsIndexGateway.class).asEagerSingleton();
    }
}
@@ -1,46 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.gateway.hdfs;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.gateway.IndexShardGateway;
import org.elasticsearch.index.gateway.blobstore.BlobStoreIndexGateway;
import org.elasticsearch.index.settings.IndexSettings;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsIndexGateway extends BlobStoreIndexGateway {

    @Inject public HdfsIndexGateway(Index index, @IndexSettings Settings indexSettings, Gateway gateway) {
        super(index, indexSettings, gateway);
    }

    @Override public String type() {
        return "hdfs";
    }

    @Override public Class<? extends IndexShardGateway> shardGatewayClass() {
        return HdfsIndexShardGateway.class;
    }
}
@@ -1,45 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.gateway.hdfs;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.gateway.IndexGateway;
import org.elasticsearch.index.gateway.blobstore.BlobStoreIndexShardGateway;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.service.IndexShard;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.threadpool.ThreadPool;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsIndexShardGateway extends BlobStoreIndexShardGateway {

    @Inject public HdfsIndexShardGateway(ShardId shardId, @IndexSettings Settings indexSettings, ThreadPool threadPool, IndexGateway hdfsIndexGateway,
                                         IndexShard indexShard, Store store) {
        super(shardId, indexSettings, threadPool, hdfsIndexGateway, indexShard, store);
    }

    @Override public String type() {
        return "hdfs";
    }
}
@@ -1,36 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.hadoop;

import org.elasticsearch.plugins.AbstractPlugin;

/**
 * @author kimchy (shay.banon)
 */
public class HadoopPlugin extends AbstractPlugin {

    @Override public String name() {
        return "hadoop";
    }

    @Override public String description() {
        return "Hadoop Plugin";
    }
}
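For orientation, a minimal sketch of how a node enabling this plugin's HDFS gateway was configured. The settings keys are the ones exercised by HdfsGatewayTests below; the NameNode URI is a hypothetical placeholder:

    import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder
    import static org.elasticsearch.node.NodeBuilder.nodeBuilder

    // "hdfs" selects this plugin's gateway (HdfsGateway.type() above returns "hdfs")
    def settings = settingsBuilder()
            .put('gateway.type', 'hdfs')
            .put('gateway.hdfs.uri', 'hdfs://namenode:8020')   // hypothetical NameNode address
            .put('gateway.hdfs.path', 'data/hdfs/gateway')
            .build()
    def node = nodeBuilder().settings(settings).build().start()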
@@ -1,226 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.hadoop.gateway;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.Gateway;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalNode;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.common.settings.ImmutableSettings.*;
import static org.elasticsearch.node.NodeBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class HdfsGatewayTests {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    private Node node;

    @BeforeMethod void setUpNodes() throws Exception {
        // start the node and reset the gateway
        node = buildNode();
        ((InternalNode) node).injector().getInstance(Gateway.class).reset();
        node.close();
        // now start the node clean
        node = buildNode().start();
    }

    private Node buildNode() {
        Settings settings = settingsBuilder()
                .put("cluster.name", "test-cluster-" + NetworkUtils.getLocalAddress().getHostName())
                .put("gateway.type", "hdfs")
                .put("gateway.hdfs.uri", "file:///")
//                .put("gateway.hdfs.uri", "hdfs://training-vm.local:8022")
                .put("gateway.hdfs.path", "data/hdfs/gateway")
                .build();
        return nodeBuilder().settings(settingsBuilder().put(settings).put("node.name", "node1")).build();
    }

    @AfterMethod void closeNodes() throws Exception {
        node.stop();
        ((InternalNode) node).injector().getInstance(Gateway.class).reset();
        node.close();
    }

    @Test public void testHdfsGateway() throws Exception {
        // first, test meta data
        CreateIndexResponse createIndexResponse = node.client().admin().indices().create(createIndexRequest("test")).actionGet();
        assertThat(createIndexResponse.acknowledged(), equalTo(true));
        node.close();
        node = buildNode().start();

        logger.info("--> waiting for yellow status");
        ClusterHealthResponse health = node.client().admin().cluster().prepareHealth().setWaitForYellowStatus().execute().actionGet();
        assertThat(health.timedOut(), equalTo(false));

        try {
            node.client().admin().indices().create(createIndexRequest("test")).actionGet();
            assert false : "index should already exist";
        } catch (IndexAlreadyExistsException e) {
            // all is well
        }

        logger.info("Running Cluster Health (wait for the shards to startup)");
        ClusterHealthResponse clusterHealth = node.client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));

        // Translog tests
        // create a mapping
        PutMappingResponse putMappingResponse = node.client().admin().indices().putMapping(putMappingRequest("test").type("type1")
                .source(mappingSource())).actionGet();
        assertThat(putMappingResponse.acknowledged(), equalTo(true));

        // verify that mapping is there
        ClusterStateResponse clusterState = node.client().admin().cluster().state(clusterStateRequest()).actionGet();
        assertThat(clusterState.state().metaData().index("test").mapping("type1"), notNullValue());

        // create two and delete the first
        logger.info("Indexing #1");
        node.client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet();
        logger.info("Indexing #2");
        node.client().index(Requests.indexRequest("test").type("type1").id("2").source(source("2", "test"))).actionGet();
        logger.info("Deleting #1");
        node.client().delete(deleteRequest("test").type("type1").id("1")).actionGet();

        // perform snapshot to the index
        logger.info("Gateway Snapshot");
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();
        logger.info("Gateway Snapshot (should be a no op)");
        // do it again, it should be a no op
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();

        logger.info("Closing the server");
        node.close();
        logger.info("Starting the server, should recover from the gateway (only translog should be populated)");
        node = buildNode().start();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        clusterHealth = node.client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));

        // verify that mapping is there
        clusterState = node.client().admin().cluster().state(clusterStateRequest()).actionGet();
        assertThat(clusterState.state().metaData().index("test").mapping("type1"), notNullValue());

        logger.info("Getting #1, should not exist");
        GetResponse getResponse = node.client().get(getRequest("test").type("type1").id("1")).actionGet();
        assertThat(getResponse.exists(), equalTo(false));
        logger.info("Getting #2");
        getResponse = node.client().get(getRequest("test").type("type1").id("2")).actionGet();
        assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));

        // Now flush and add some data (so we have index recovery as well)
        logger.info("Flushing, so we have actual content in the index files (#2 should be in the index)");
        node.client().admin().indices().flush(flushRequest("test")).actionGet();
        logger.info("Indexing #3, so we have something in the translog as well");
        node.client().index(Requests.indexRequest("test").type("type1").id("3").source(source("3", "test"))).actionGet();

        logger.info("Gateway Snapshot");
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();
        logger.info("Gateway Snapshot (should be a no op)");
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();

        logger.info("Closing the server");
        node.close();
        logger.info("Starting the server, should recover from the gateway (both index and translog)");
        node = buildNode().start();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        clusterHealth = node.client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));

        logger.info("Getting #1, should not exist");
        getResponse = node.client().get(getRequest("test").type("type1").id("1")).actionGet();
        assertThat(getResponse.exists(), equalTo(false));
        logger.info("Getting #2 (not from the translog, but from the index)");
        getResponse = node.client().get(getRequest("test").type("type1").id("2")).actionGet();
        assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));
        logger.info("Getting #3 (from the translog)");
        getResponse = node.client().get(getRequest("test").type("type1").id("3")).actionGet();
        assertThat(getResponse.sourceAsString(), equalTo(source("3", "test")));

        logger.info("Flushing, so we have actual content in the index files (#3 should be in the index now as well)");
        node.client().admin().indices().flush(flushRequest("test")).actionGet();

        logger.info("Gateway Snapshot");
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();
        logger.info("Gateway Snapshot (should be a no op)");
        node.client().admin().indices().gatewaySnapshot(gatewaySnapshotRequest("test")).actionGet();

        logger.info("Closing the server");
        node.close();
        logger.info("Starting the server, should recover from the gateway (just from the index, nothing in the translog)");
        node = buildNode().start();

        logger.info("Running Cluster Health (wait for the shards to startup)");
        clusterHealth = node.client().admin().cluster().health(clusterHealthRequest().waitForYellowStatus().waitForActiveShards(1)).actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.YELLOW));

        logger.info("Getting #1, should not exist");
        getResponse = node.client().get(getRequest("test").type("type1").id("1")).actionGet();
        assertThat(getResponse.exists(), equalTo(false));
        logger.info("Getting #2 (not from the translog, but from the index)");
        getResponse = node.client().get(getRequest("test").type("type1").id("2")).actionGet();
        assertThat(getResponse.sourceAsString(), equalTo(source("2", "test")));
        logger.info("Getting #3 (not from the translog, but from the index)");
        getResponse = node.client().get(getRequest("test").type("type1").id("3")).actionGet();
        assertThat(getResponse.sourceAsString(), equalTo(source("3", "test")));

        logger.info("Deleting the index");
        node.client().admin().indices().delete(deleteIndexRequest("test")).actionGet();
    }

    private String mappingSource() {
        return "{ type1 : { properties : { name : { type : \"string\" } } } }";
    }

    private String source(String id, String nameValue) {
        return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
    }
}
@@ -1,134 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'groovy'
apply plugin: 'maven'

archivesBaseName = "elasticsearch-lang-groovy"

explodedDistDir = new File(distsDir, 'exploded')

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/groovy', 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/groovy', 'src/test/java'

// add the source files to the dist jar
//jar {
//    from sourceSets.main.allSource
//}

configurations {
    dists
    distLib {
        visible = false
        transitive = false
    }
}

dependencies {
    compile project(':elasticsearch')

    groovy group: 'org.codehaus.groovy', name: 'groovy-all', version: '1.8.2'
    distLib('org.codehaus.groovy:groovy-all:1.8.2') { transitive = false }
    testCompile('junit:junit:4.8.1') { transitive = false }
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch files (compile above adds the elasticsearch one)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

jar {
    // from sourceSets.main.allJava
    manifest {
        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
    }
}

artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-lang-groovy'
            description 'Groovy Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured { pom ->
            pom.dependencies = pom.dependencies.findAll { dep -> dep.scope != 'test' } // removes the test scoped ones
        }
    }
}
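Taken together, the tasks above chain jar -> explodedDist -> zip, and release then stages the resulting plugin zip under a plugins/ directory inside the root project's distsDir; presumably the artifact was produced by running the release task from this subproject (the exact invocation depends on the multi-project layout).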
@@ -1,39 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.client

/**
 * @author kimchy (shay.banon)
 */
class GAdminClient {

    private final GClient gClient

    final GIndicesAdminClient indices

    final GClusterAdminClient cluster

    GAdminClient(gClient) {
        this.gClient = gClient

        this.indices = new GIndicesAdminClient(gClient)
        this.cluster = new GClusterAdminClient(gClient)
    }
}
@@ -1,357 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.client

import org.elasticsearch.action.ActionListener
import org.elasticsearch.action.ListenableActionFuture
import org.elasticsearch.action.count.CountRequest
import org.elasticsearch.action.count.CountResponse
import org.elasticsearch.action.delete.DeleteRequest
import org.elasticsearch.action.delete.DeleteResponse
import org.elasticsearch.action.deletebyquery.DeleteByQueryRequest
import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse
import org.elasticsearch.action.get.GetRequest
import org.elasticsearch.action.get.GetResponse
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.action.index.IndexResponse
import org.elasticsearch.action.mlt.MoreLikeThisRequest
import org.elasticsearch.action.percolate.PercolateRequest
import org.elasticsearch.action.percolate.PercolateResponse
import org.elasticsearch.action.search.SearchRequest
import org.elasticsearch.action.search.SearchResponse
import org.elasticsearch.client.Client
import org.elasticsearch.client.action.count.CountRequestBuilder
import org.elasticsearch.client.action.delete.DeleteRequestBuilder
import org.elasticsearch.client.action.deletebyquery.DeleteByQueryRequestBuilder
import org.elasticsearch.client.action.get.GetRequestBuilder
import org.elasticsearch.client.action.index.IndexRequestBuilder
import org.elasticsearch.client.action.percolate.PercolateRequestBuilder
import org.elasticsearch.client.action.search.SearchRequestBuilder
import org.elasticsearch.client.action.support.BaseRequestBuilder
import org.elasticsearch.client.internal.InternalClient
import org.elasticsearch.common.xcontent.XContentType
import org.elasticsearch.groovy.client.action.GActionFuture
import org.elasticsearch.groovy.common.xcontent.GXContentBuilder

/**
 * @author kimchy (shay.banon)
 */
class GClient {

    static {
        BaseRequestBuilder.metaClass.gexecute = {
            ListenableActionFuture future = delegate.execute()
            return new GActionFuture(future)
        }

        IndexRequest.metaClass.setSource = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        IndexRequest.metaClass.source = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        IndexRequestBuilder.metaClass.setSource = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        IndexRequestBuilder.metaClass.source = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }

        DeleteByQueryRequest.metaClass.setQuery = {Closure c ->
            delegate.query(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        DeleteByQueryRequest.metaClass.query = {Closure c ->
            delegate.query(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        DeleteByQueryRequestBuilder.metaClass.setQuery = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        DeleteByQueryRequestBuilder.metaClass.query = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }

        CountRequest.metaClass.setQuery = {Closure c ->
            delegate.query(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        CountRequest.metaClass.query = {Closure c ->
            delegate.query(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        CountRequestBuilder.metaClass.setQuery = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        CountRequestBuilder.metaClass.query = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }

        SearchRequest.metaClass.setSource = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequest.metaClass.source = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequest.metaClass.setExtraSource = {Closure c ->
            delegate.extraSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequest.metaClass.extraSource = {Closure c ->
            delegate.extraSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.setSource = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.source = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.setExtraSource = {Closure c ->
            delegate.setExtraSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.extraSource = {Closure c ->
            delegate.setExtraSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.setQuery = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.query = {Closure c ->
            delegate.setQuery(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.setFilter = {Closure c ->
            delegate.setFilter(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.filter = {Closure c ->
            delegate.setFilter(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.setFacets = {Closure c ->
            delegate.setFacets(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        SearchRequestBuilder.metaClass.facets = {Closure c ->
            delegate.setFacets(new GXContentBuilder().buildAsBytes(c, contentType))
        }

        MoreLikeThisRequest.metaClass.setSearchSource = {Closure c ->
            delegate.searchSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }
        MoreLikeThisRequest.metaClass.searchSource = {Closure c ->
            delegate.searchSource(new GXContentBuilder().buildAsBytes(c, contentType))
        }

        PercolateRequest.metaClass.setSource = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        PercolateRequest.metaClass.source = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        PercolateRequestBuilder.metaClass.setSource = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
        PercolateRequestBuilder.metaClass.source = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsBytes(c, indexContentType))
        }
    }

    public static XContentType contentType = XContentType.SMILE

    public static XContentType indexContentType = XContentType.JSON

    final Client client

    int resolveStrategy = Closure.DELEGATE_FIRST

    private final InternalClient internalClient

    final GAdminClient admin

    GClient(client) {
        this.client = client
        this.internalClient = client

        this.admin = new GAdminClient(this)
    }

    IndexRequestBuilder prepareIndex(String index, String type) {
        return client.prepareIndex(index, type)
    }

    IndexRequestBuilder prepareIndex(String index, String type, String id) {
        return client.prepareIndex(index, type, id)
    }

    GActionFuture<IndexResponse> index(Closure c) {
        IndexRequest request = new IndexRequest()
        c.setDelegate request
        c.resolveStrategy = resolveStrategy
        c.call()
        index(request)
    }

    GActionFuture<IndexResponse> index(IndexRequest request) {
        GActionFuture<IndexResponse> future = new GActionFuture<IndexResponse>(internalClient.threadPool(), request)
        client.index(request, future)
        return future
    }

    void index(IndexRequest request, ActionListener<IndexResponse> listener) {
        client.index(request, listener)
    }

    GetRequestBuilder prepareGet(String index, String type, String id) {
        return client.prepareGet(index, type, id)
    }

    GActionFuture<GetResponse> get(Closure c) {
        GetRequest request = new GetRequest()
        c.setDelegate request
        c.resolveStrategy = resolveStrategy
        c.call()
        get(request)
    }

    GActionFuture<GetResponse> get(GetRequest request) {
        GActionFuture<GetResponse> future = new GActionFuture<GetResponse>(internalClient.threadPool(), request)
        client.get(request, future)
        return future
    }

    void get(GetRequest request, ActionListener<GetResponse> listener) {
        client.get(request, listener)
    }

    DeleteRequestBuilder prepareDelete(String index, String type, String id) {
        return client.prepareDelete(index, type, id)
    }

    GActionFuture<DeleteResponse> delete(Closure c) {
        DeleteRequest request = new DeleteRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        delete(request)
    }

    GActionFuture<DeleteResponse> delete(DeleteRequest request) {
        GActionFuture<DeleteResponse> future = new GActionFuture<DeleteResponse>(internalClient.threadPool(), request)
        client.delete(request, future)
        return future
    }

    void delete(DeleteRequest request, ActionListener<DeleteResponse> listener) {
        client.delete(request, listener)
    }

    DeleteByQueryRequestBuilder prepareDeleteByQuery(String... indices) {
        return client.prepareDeleteByQuery(indices)
    }

    GActionFuture<DeleteByQueryResponse> deleteByQuery(Closure c) {
        DeleteByQueryRequest request = new DeleteByQueryRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        deleteByQuery(request)
    }

    GActionFuture<DeleteByQueryResponse> deleteByQuery(DeleteByQueryRequest request) {
        GActionFuture<DeleteByQueryResponse> future = new GActionFuture<DeleteByQueryResponse>(internalClient.threadPool(), request)
        client.deleteByQuery(request, future)
        return future
    }

    void deleteByQuery(DeleteByQueryRequest request, ActionListener<DeleteByQueryResponse> listener) {
        client.deleteByQuery(request, listener)
    }

    CountRequestBuilder prepareCount(String... indices) {
        return client.prepareCount(indices)
    }

    GActionFuture<CountResponse> count(Closure c) {
        CountRequest request = new CountRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        count(request)
    }

    GActionFuture<CountResponse> count(CountRequest request) {
        GActionFuture<CountResponse> future = new GActionFuture<CountResponse>(internalClient.threadPool(), request)
        client.count(request, future)
        return future
    }

    void count(CountRequest request, ActionListener<CountResponse> listener) {
        client.count(request, listener)
    }

    SearchRequestBuilder prepareSearch(String... indices) {
        return client.prepareSearch(indices)
    }

    GActionFuture<SearchResponse> search(Closure c) {
        SearchRequest request = new SearchRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        search(request)
    }

    GActionFuture<SearchResponse> search(SearchRequest request) {
        GActionFuture<SearchResponse> future = new GActionFuture<SearchResponse>(internalClient.threadPool(), request)
        client.search(request, future)
        return future
    }

    void search(SearchRequest request, ActionListener<SearchResponse> listener) {
        client.search(request, listener)
    }

    PercolateRequestBuilder preparePercolate(String index, String type) {
        return client.preparePercolate(index, type)
    }

    GActionFuture<PercolateResponse> percolate(Closure c) {
        PercolateRequest request = new PercolateRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        percolate(request)
    }

    GActionFuture<PercolateResponse> percolate(PercolateRequest request) {
        GActionFuture<PercolateResponse> future = new GActionFuture<PercolateResponse>(internalClient.threadPool(), request)
        client.percolate(request, future)
        return future
    }

    GActionFuture<SearchResponse> moreLikeThis(Closure c) {
        MoreLikeThisRequest request = new MoreLikeThisRequest()
        c.resolveStrategy = resolveStrategy
        c.setDelegate request
        c.call()
        moreLikeThis(request)
    }

    GActionFuture<SearchResponse> moreLikeThis(MoreLikeThisRequest request) {
        GActionFuture<SearchResponse> future = new GActionFuture<SearchResponse>(internalClient.threadPool(), request)
        client.moreLikeThis(request, future)
        return future
    }

    void moreLikeThis(MoreLikeThisRequest request, ActionListener<SearchResponse> listener) {
        client.moreLikeThis(request, listener)
    }
}
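A hedged usage sketch of the closure API that the metaclass block above enables; the node variable and the document field are illustrative only:

    def gclient = new GClient(node.client())   // wraps any org.elasticsearch.client.Client
    def future = gclient.index {               // the closure delegates to an IndexRequest
        index 'test'
        type 'type1'
        id '1'
        source {                               // built by GXContentBuilder using indexContentType (JSON)
            name = 'some value'                // hypothetical field
        }
    }
    def response = future.actionGet()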
@@ -1,205 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.client

import org.elasticsearch.action.ActionListener
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse
import org.elasticsearch.action.admin.cluster.node.restart.NodesRestartRequest
import org.elasticsearch.action.admin.cluster.node.restart.NodesRestartResponse
import org.elasticsearch.action.admin.cluster.node.shutdown.NodesShutdownRequest
import org.elasticsearch.action.admin.cluster.node.shutdown.NodesShutdownResponse
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse
import org.elasticsearch.client.ClusterAdminClient
import org.elasticsearch.client.action.admin.cluster.health.ClusterHealthRequestBuilder
import org.elasticsearch.client.action.admin.cluster.node.info.NodesInfoRequestBuilder
import org.elasticsearch.client.action.admin.cluster.node.restart.NodesRestartRequestBuilder
import org.elasticsearch.client.action.admin.cluster.node.shutdown.NodesShutdownRequestBuilder
import org.elasticsearch.client.action.admin.cluster.node.stats.NodesStatsRequestBuilder
import org.elasticsearch.client.action.admin.cluster.state.ClusterStateRequestBuilder
import org.elasticsearch.client.internal.InternalClient
import org.elasticsearch.groovy.client.action.GActionFuture

/**
 * @author kimchy (shay.banon)
 */
class GClusterAdminClient {

    private final GClient gClient

    private final InternalClient internalClient

    final ClusterAdminClient clusterAdminClient

    GClusterAdminClient(gClient) {
        this.gClient = gClient
        this.internalClient = gClient.client
        this.clusterAdminClient = internalClient.admin().cluster()
    }

    // HEALTH

    ClusterHealthRequestBuilder prepareHealth(String... indices) {
        clusterAdminClient.prepareHealth(indices)
    }

    GActionFuture<ClusterHealthResponse> health(Closure c) {
        ClusterHealthRequest request = new ClusterHealthRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        health(request)
    }

    GActionFuture<ClusterHealthResponse> health(ClusterHealthRequest request) {
        GActionFuture<ClusterHealthResponse> future = new GActionFuture<ClusterHealthResponse>(internalClient.threadPool(), request)
        clusterAdminClient.health(request, future)
        return future
    }

    void health(ClusterHealthRequest request, ActionListener<ClusterHealthResponse> listener) {
        clusterAdminClient.health(request, listener)
    }

    // STATE

    ClusterStateRequestBuilder prepareState() {
        clusterAdminClient.prepareState()
    }

    GActionFuture<ClusterStateResponse> state(Closure c) {
        ClusterStateRequest request = new ClusterStateRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        state(request)
    }

    GActionFuture<ClusterStateResponse> state(ClusterStateRequest request) {
        GActionFuture<ClusterStateResponse> future = new GActionFuture<ClusterStateResponse>(internalClient.threadPool(), request)
        clusterAdminClient.state(request, future)
        return future
    }

    void state(ClusterStateRequest request, ActionListener<ClusterStateResponse> listener) {
        clusterAdminClient.state(request, listener)
    }

    // NODES INFO

    NodesInfoRequestBuilder prepareNodesInfo(String... nodesIds) {
        clusterAdminClient.prepareNodesInfo(nodesIds)
    }

    GActionFuture<NodesInfoResponse> nodesInfo(Closure c) {
        NodesInfoRequest request = new NodesInfoRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        nodesInfo(request)
    }

    GActionFuture<NodesInfoResponse> nodesInfo(NodesInfoRequest request) {
        GActionFuture<NodesInfoResponse> future = new GActionFuture<NodesInfoResponse>(internalClient.threadPool(), request)
        clusterAdminClient.nodesInfo(request, future)
        return future
    }

    void nodesInfo(NodesInfoRequest request, ActionListener<NodesInfoResponse> listener) {
        clusterAdminClient.nodesInfo(request, listener)
    }

    // NODES STATS

    NodesStatsRequestBuilder prepareNodesStats(String... nodesIds) {
        clusterAdminClient.prepareNodesStats(nodesIds)
    }

    GActionFuture<NodesStatsResponse> nodesStats(Closure c) {
        NodesStatsRequest request = new NodesStatsRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        nodesStats(request)
    }

    GActionFuture<NodesStatsResponse> nodesStats(NodesStatsRequest request) {
        GActionFuture<NodesStatsResponse> future = new GActionFuture<NodesStatsResponse>(internalClient.threadPool(), request)
        clusterAdminClient.nodesStats(request, future)
        return future
    }

    void nodesStats(NodesStatsRequest request, ActionListener<NodesStatsResponse> listener) {
        clusterAdminClient.nodesStats(request, listener)
    }

    // NODES SHUTDOWN

    NodesShutdownRequestBuilder prepareNodesShutdown(String... nodesIds) {
        clusterAdminClient.prepareNodesShutdown(nodesIds)
    }

    GActionFuture<NodesShutdownResponse> nodesShutdown(Closure c) {
        NodesShutdownRequest request = new NodesShutdownRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        nodesShutdown(request)
    }

    GActionFuture<NodesShutdownResponse> nodesShutdown(NodesShutdownRequest request) {
        GActionFuture<NodesShutdownResponse> future = new GActionFuture<NodesShutdownResponse>(internalClient.threadPool(), request)
        clusterAdminClient.nodesShutdown(request, future)
        return future
    }

    void nodesShutdown(NodesShutdownRequest request, ActionListener<NodesShutdownResponse> listener) {
        clusterAdminClient.nodesShutdown(request, listener)
    }

    // NODES RESTART

    NodesRestartRequestBuilder prepareNodesRestart(String... nodesIds) {
        clusterAdminClient.prepareNodesRestart(nodesIds)
    }

    GActionFuture<NodesRestartResponse> nodesRestart(Closure c) {
        NodesRestartRequest request = new NodesRestartRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        nodesRestart(request)
    }

    GActionFuture<NodesRestartResponse> nodesRestart(NodesRestartRequest request) {
        GActionFuture<NodesRestartResponse> future = new GActionFuture<NodesRestartResponse>(internalClient.threadPool(), request)
        clusterAdminClient.nodesRestart(request, future)
        return future
    }

    void nodesRestart(NodesRestartRequest request, ActionListener<NodesRestartResponse> listener) {
        clusterAdminClient.nodesRestart(request, listener)
    }
}
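The same closure pattern applies to cluster-level calls; a sketch, reusing the hypothetical gclient from the previous example:

    def healthFuture = gclient.admin.cluster.health {
        waitForYellowStatus()                  // method on the delegated ClusterHealthRequest
    }
    assert !healthFuture.actionGet().timedOut()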
@@ -1,483 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.client

import org.elasticsearch.action.ActionListener
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse
import org.elasticsearch.action.admin.indices.flush.FlushRequest
import org.elasticsearch.action.admin.indices.flush.FlushResponse
import org.elasticsearch.action.admin.indices.gateway.snapshot.GatewaySnapshotRequest
import org.elasticsearch.action.admin.indices.gateway.snapshot.GatewaySnapshotResponse
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse
import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest
import org.elasticsearch.action.admin.indices.optimize.OptimizeResponse
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse
import org.elasticsearch.action.admin.indices.settings.UpdateSettingsRequest
import org.elasticsearch.action.admin.indices.settings.UpdateSettingsResponse
import org.elasticsearch.action.admin.indices.stats.IndicesStats
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest
import org.elasticsearch.action.admin.indices.status.IndicesStatusRequest
import org.elasticsearch.action.admin.indices.status.IndicesStatusResponse
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse
import org.elasticsearch.client.IndicesAdminClient
import org.elasticsearch.client.action.admin.indices.alias.IndicesAliasesRequestBuilder
import org.elasticsearch.client.action.admin.indices.analyze.AnalyzeRequestBuilder
import org.elasticsearch.client.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder
import org.elasticsearch.client.action.admin.indices.create.CreateIndexRequestBuilder
import org.elasticsearch.client.action.admin.indices.delete.DeleteIndexRequestBuilder
import org.elasticsearch.client.action.admin.indices.flush.FlushRequestBuilder
import org.elasticsearch.client.action.admin.indices.gateway.snapshot.GatewaySnapshotRequestBuilder
import org.elasticsearch.client.action.admin.indices.mapping.put.PutMappingRequestBuilder
import org.elasticsearch.client.action.admin.indices.optimize.OptimizeRequestBuilder
import org.elasticsearch.client.action.admin.indices.refresh.RefreshRequestBuilder
import org.elasticsearch.client.action.admin.indices.settings.UpdateSettingsRequestBuilder
import org.elasticsearch.client.action.admin.indices.stats.IndicesStatsRequestBuilder
import org.elasticsearch.client.action.admin.indices.status.IndicesStatusRequestBuilder
import org.elasticsearch.client.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder
import org.elasticsearch.client.action.admin.indices.template.put.PutIndexTemplateRequestBuilder
import org.elasticsearch.client.internal.InternalClient
import org.elasticsearch.groovy.client.action.GActionFuture
import org.elasticsearch.groovy.common.xcontent.GXContentBuilder

/**
 * @author kimchy (shay.banon)
 */
class GIndicesAdminClient {

    static {
        CreateIndexRequest.metaClass.setSettings = {Closure c ->
            delegate.settings(new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequest.metaClass.settings = {Closure c ->
            delegate.settings(new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequest.metaClass.mapping = {String type, Closure c ->
            delegate.mapping(type, new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequest.metaClass.setMapping = {String type, Closure c ->
            delegate.mapping(type, new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequestBuilder.metaClass.setSettings = {Closure c ->
            delegate.setSettings(new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequestBuilder.metaClass.settings = {Closure c ->
            delegate.setSettings(new GXContentBuilder().buildAsString(c))
        }
        CreateIndexRequestBuilder.metaClass.addMapping = {String type, Closure c ->
            delegate.addMapping(type, new GXContentBuilder().buildAsString(c))
        }

        PutMappingRequest.metaClass.setSource = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsString(c))
        }
        PutMappingRequest.metaClass.source = {Closure c ->
            delegate.source(new GXContentBuilder().buildAsString(c))
        }
        PutMappingRequestBuilder.metaClass.setSource = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsString(c))
        }
        PutMappingRequestBuilder.metaClass.source = {Closure c ->
            delegate.setSource(new GXContentBuilder().buildAsString(c))
        }

        UpdateSettingsRequest.metaClass.setSettings = {Closure c ->
            delegate.settings(new GXContentBuilder().buildAsString(c))
        }
        UpdateSettingsRequest.metaClass.settings = {Closure c ->
            delegate.settings(new GXContentBuilder().buildAsString(c))
        }
        UpdateSettingsRequestBuilder.metaClass.setSettings = {Closure c ->
            delegate.setSettings(new GXContentBuilder().buildAsString(c))
        }
        UpdateSettingsRequestBuilder.metaClass.settings = {Closure c ->
            delegate.setSettings(new GXContentBuilder().buildAsString(c))
        }
    }

    private final GClient gClient

    private final InternalClient internalClient

    final IndicesAdminClient indicesAdminClient

    GIndicesAdminClient(gClient) {
        this.gClient = gClient
        this.internalClient = gClient.client
        this.indicesAdminClient = internalClient.admin().indices()
    }

    // STATUS

    IndicesStatusRequestBuilder prepareStatus(String... indices) {
        indicesAdminClient.prepareStatus(indices)
    }

    GActionFuture<IndicesStatusResponse> status(Closure c) {
        IndicesStatusRequest request = new IndicesStatusRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        status(request)
    }

    GActionFuture<IndicesStatusResponse> status(IndicesStatusRequest request) {
        GActionFuture<IndicesStatusResponse> future = new GActionFuture<IndicesStatusResponse>(internalClient.threadPool(), request)
        indicesAdminClient.status(request, future)
        return future
    }

    void status(IndicesStatusRequest request, ActionListener<IndicesStatusResponse> listener) {
        indicesAdminClient.status(request, listener)
    }

    // STATS

    IndicesStatsRequestBuilder prepareStats(String... indices) {
        indicesAdminClient.prepareStats(indices)
    }

    GActionFuture<IndicesStats> stats(Closure c) {
        IndicesStatsRequest request = new IndicesStatsRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        stats(request)
    }

    GActionFuture<IndicesStats> stats(IndicesStatsRequest request) {
        GActionFuture<IndicesStats> future = new GActionFuture<IndicesStats>(internalClient.threadPool(), request)
        indicesAdminClient.stats(request, future)
        return future
    }

    void stats(IndicesStatsRequest request, ActionListener<IndicesStats> listener) {
        indicesAdminClient.stats(request, listener)
    }

    // CREATE

    CreateIndexRequestBuilder prepareCreate(String index) {
        indicesAdminClient.prepareCreate(index)
    }

    GActionFuture<CreateIndexResponse> create(Closure c) {
        CreateIndexRequest request = new CreateIndexRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        create(request)
    }

    GActionFuture<CreateIndexResponse> create(CreateIndexRequest request) {
        GActionFuture<CreateIndexResponse> future = new GActionFuture<CreateIndexResponse>(internalClient.threadPool(), request)
        indicesAdminClient.create(request, future)
        return future
    }

    void create(CreateIndexRequest request, ActionListener<CreateIndexResponse> listener) {
        indicesAdminClient.create(request, listener)
    }

    // DELETE

    DeleteIndexRequestBuilder prepareDelete(String index) {
        indicesAdminClient.prepareDelete(index)
    }

    GActionFuture<DeleteIndexResponse> delete(Closure c) {
        DeleteIndexRequest request = new DeleteIndexRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        delete(request)
    }

    GActionFuture<DeleteIndexResponse> delete(DeleteIndexRequest request) {
        GActionFuture<DeleteIndexResponse> future = new GActionFuture<DeleteIndexResponse>(internalClient.threadPool(), request)
        indicesAdminClient.delete(request, future)
        return future
    }

    void delete(DeleteIndexRequest request, ActionListener<DeleteIndexResponse> listener) {
        indicesAdminClient.delete(request, listener)
    }

    // REFRESH

    RefreshRequestBuilder prepareRefresh(String... indices) {
        indicesAdminClient.prepareRefresh(indices)
    }

    GActionFuture<RefreshResponse> refresh(Closure c) {
        RefreshRequest request = new RefreshRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        refresh(request)
    }

    GActionFuture<RefreshResponse> refresh(RefreshRequest request) {
        GActionFuture<RefreshResponse> future = new GActionFuture<RefreshResponse>(internalClient.threadPool(), request)
        indicesAdminClient.refresh(request, future)
        return future
    }

    void refresh(RefreshRequest request, ActionListener<RefreshResponse> listener) {
        indicesAdminClient.refresh(request, listener)
    }

    // FLUSH

    FlushRequestBuilder prepareFlush(String... indices) {
        indicesAdminClient.prepareFlush(indices)
    }

    GActionFuture<FlushResponse> flush(Closure c) {
        FlushRequest request = new FlushRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        flush(request)
    }

    GActionFuture<FlushResponse> flush(FlushRequest request) {
        GActionFuture<FlushResponse> future = new GActionFuture<FlushResponse>(internalClient.threadPool(), request)
        indicesAdminClient.flush(request, future)
        return future
    }

    void flush(FlushRequest request, ActionListener<FlushResponse> listener) {
        indicesAdminClient.flush(request, listener)
    }

    // OPTIMIZE

    OptimizeRequestBuilder prepareOptimize(String... indices) {
        indicesAdminClient.prepareOptimize(indices)
    }

    GActionFuture<OptimizeResponse> optimize(Closure c) {
        OptimizeRequest request = new OptimizeRequest()
        c.setDelegate request
        c.resolveStrategy = gClient.resolveStrategy
        c.call()
        optimize(request)
    }

    GActionFuture<OptimizeResponse> optimize(OptimizeRequest request) {
        GActionFuture<OptimizeResponse> future = new GActionFuture<OptimizeResponse>(internalClient.threadPool(), request)
        indicesAdminClient.optimize(request, future)
        return future
    }

    void optimize(OptimizeRequest request, ActionListener<OptimizeResponse> listener) {
        indicesAdminClient.optimize(request, listener)
    }

    // PUT MAPPING

    PutMappingRequestBuilder preparePutMapping(String... indices) {
        indicesAdminClient.preparePutMapping(indices)
    }

    GActionFuture<PutMappingResponse> putMapping(Closure c) {
|
||||
PutMappingRequest request = new PutMappingRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
putMapping(request)
|
||||
}
|
||||
|
||||
GActionFuture<PutMappingResponse> putMapping(PutMappingRequest request) {
|
||||
GActionFuture<PutMappingResponse> future = new GActionFuture<PutMappingResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.putMapping(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
void putMapping(PutMappingRequest request, ActionListener<PutMappingResponse> listener) {
|
||||
indicesAdminClient.putMapping(request, listener)
|
||||
}
|
||||
|
||||
// GATEWAY SNAPSHOT
|
||||
|
||||
GatewaySnapshotRequestBuilder prepareGatewaySnapshot(String... indices) {
|
||||
indicesAdminClient.prepareGatewaySnapshot(indices)
|
||||
}
|
||||
|
||||
GActionFuture<GatewaySnapshotResponse> gatewaySnapshot(Closure c) {
|
||||
GatewaySnapshotRequest request = new GatewaySnapshotRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
gatewaySnapshot(request)
|
||||
}
|
||||
|
||||
GActionFuture<GatewaySnapshotResponse> gatewaySnapshot(GatewaySnapshotRequest request) {
|
||||
GActionFuture<GatewaySnapshotResponse> future = new GActionFuture<GatewaySnapshotResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.gatewaySnapshot(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
void gatewaySnapshot(GatewaySnapshotRequest request, ActionListener<GatewaySnapshotResponse> listener) {
|
||||
indicesAdminClient.gatewaySnapshot(request, listener)
|
||||
}
|
||||
|
||||
// Aliases
|
||||
|
||||
IndicesAliasesRequestBuilder prepareAliases() {
|
||||
indicesAdminClient.prepareAliases()
|
||||
}
|
||||
|
||||
GActionFuture<IndicesAliasesResponse> aliases(Closure c) {
|
||||
IndicesAliasesRequest request = new IndicesAliasesRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
aliases(request)
|
||||
}
|
||||
|
||||
GActionFuture<IndicesAliasesResponse> aliases(IndicesAliasesRequest request) {
|
||||
GActionFuture<IndicesAliasesResponse> future = new GActionFuture<IndicesAliasesResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.aliases(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
void aliases(IndicesAliasesRequest request, ActionListener<IndicesAliasesResponse> listener) {
|
||||
indicesAdminClient.aliases(request, listener)
|
||||
}
|
||||
|
||||
void aliases(ClearIndicesCacheRequest request, ActionListener<ClearIndicesCacheResponse> listener) {
|
||||
indicesAdminClient.clearCache(request, listener)
|
||||
}
|
||||
|
||||
// CLEAR CACHE
|
||||
|
||||
ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) {
|
||||
indicesAdminClient.prepareClearCache(indices)
|
||||
}
|
||||
|
||||
GActionFuture<ClearIndicesCacheResponse> clearCache(Closure c) {
|
||||
ClearIndicesCacheRequest request = new ClearIndicesCacheRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
clearCache(request)
|
||||
}
|
||||
|
||||
GActionFuture<ClearIndicesCacheResponse> clearCache(ClearIndicesCacheRequest request) {
|
||||
GActionFuture<ClearIndicesCacheResponse> future = new GActionFuture<ClearIndicesCacheResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.clearCache(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
// UPDATE SETTINGS
|
||||
|
||||
UpdateSettingsRequestBuilder prepareUpdateSettings(String... indices) {
|
||||
indicesAdminClient.prepareUpdateSettings(indices)
|
||||
}
|
||||
|
||||
GActionFuture<UpdateSettingsResponse> updateSettings(Closure c) {
|
||||
UpdateSettingsRequest request = new UpdateSettingsRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
updateSettings(request)
|
||||
}
|
||||
|
||||
GActionFuture<UpdateSettingsResponse> updateSettings(UpdateSettingsRequest request) {
|
||||
GActionFuture<UpdateSettingsResponse> future = new GActionFuture<UpdateSettingsResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.updateSettings(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
// ANALYZE
|
||||
|
||||
AnalyzeRequestBuilder prepareAnalyze(String index, String text) {
|
||||
indicesAdminClient.prepareAnalyze(index, text)
|
||||
}
|
||||
|
||||
GActionFuture<AnalyzeResponse> analyze(Closure c) {
|
||||
AnalyzeRequest request = new AnalyzeRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
analyze(request)
|
||||
}
|
||||
|
||||
GActionFuture<AnalyzeResponse> analyze(AnalyzeRequest request) {
|
||||
GActionFuture<AnalyzeResponse> future = new GActionFuture<AnalyzeResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.analyze(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
// PUT INDEX TEMPLATE
|
||||
|
||||
PutIndexTemplateRequestBuilder preparePutTemplate(String name) {
|
||||
indicesAdminClient.preparePutTemplate(name)
|
||||
}
|
||||
|
||||
GActionFuture<PutIndexTemplateResponse> putTemplate(Closure c) {
|
||||
PutIndexTemplateRequest request = new PutIndexTemplateRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
putTemplate(request)
|
||||
}
|
||||
|
||||
GActionFuture<PutIndexTemplateResponse> putTemplate(PutIndexTemplateRequest request) {
|
||||
GActionFuture<PutIndexTemplateResponse> future = new GActionFuture<PutIndexTemplateResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.putTemplate(request, future)
|
||||
return future
|
||||
}
|
||||
|
||||
// DELETE INDEX TEMPLATE
|
||||
|
||||
DeleteIndexTemplateRequestBuilder prepareDeleteTemplate(String name) {
|
||||
indicesAdminClient.prepareDeleteTemplate(name)
|
||||
}
|
||||
|
||||
GActionFuture<DeleteIndexTemplateResponse> deleteTemplate(Closure c) {
|
||||
DeleteIndexTemplateRequest request = new DeleteIndexTemplateRequest()
|
||||
c.setDelegate request
|
||||
c.resolveStrategy = gClient.resolveStrategy
|
||||
c.call()
|
||||
deleteTemplate(request)
|
||||
}
|
||||
|
||||
GActionFuture<DeleteIndexTemplateResponse> deleteTemplate(DeleteIndexTemplateRequest request) {
|
||||
GActionFuture<DeleteIndexTemplateResponse> future = new GActionFuture<DeleteIndexTemplateResponse>(internalClient.threadPool(), request)
|
||||
indicesAdminClient.deleteTemplate(request, future)
|
||||
return future
|
||||
}
|
||||
}
|
|
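
A minimal usage sketch of the closure-based admin API removed above. The index name and settings values are illustrative, and it assumes the CreateIndexRequest metaClass extensions registered earlier in GClient accept a settings closure; not part of the removed sources:

    // hypothetical example: create an index, letting the closure populate the request
    def future = node.client.admin.indices.create {
        index 'test'
        settings {
            number_of_shards = 1
        }
    }
    def response = future.response   // blocks via GActionFuture.actionGet()
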
@@ -1,167 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.client.action;

import groovy.lang.Closure;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.support.PlainListenableActionFuture;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * @author kimchy (shay.banon)
 */
public class GActionFuture<T> implements ListenableActionFuture<T>, ActionListener<T> {

    private final PlainListenableActionFuture<T> future;

    public GActionFuture(ListenableActionFuture<T> future) {
        this.future = (PlainListenableActionFuture<T>) future;
    }

    public GActionFuture(ThreadPool threadPool, ActionRequest request) {
        this.future = new PlainListenableActionFuture<T>(request.listenerThreaded(), threadPool);
    }

    public void setListener(final Closure listener) {
        addListener(new ActionListener<T>() {
            @Override public void onResponse(T t) {
                listener.call(this);
            }

            @Override public void onFailure(Throwable e) {
                listener.call(this);
            }
        });
    }

    public void setSuccess(final Closure success) {
        addListener(new ActionListener<T>() {
            @Override public void onResponse(T t) {
                success.call(t);
            }

            @Override public void onFailure(Throwable e) {
                // ignore
            }
        });
    }

    public void setFailure(final Closure failure) {
        addListener(new ActionListener<T>() {
            @Override public void onResponse(T t) {
                // nothing
            }

            @Override public void onFailure(Throwable e) {
                failure.call(e);
            }
        });
    }

    public T getResponse() {
        return actionGet();
    }

    public T response(String timeout) throws ElasticSearchException {
        return actionGet(timeout);
    }

    public T response(long timeoutMillis) throws ElasticSearchException {
        return actionGet(timeoutMillis);
    }

    public T response(TimeValue timeout) throws ElasticSearchException {
        return actionGet(timeout);
    }

    public T response(long timeout, TimeUnit unit) throws ElasticSearchException {
        return actionGet(timeout, unit);
    }

    @Override public void onResponse(T t) {
        future.onResponse(t);
    }

    @Override public void onFailure(Throwable e) {
        future.onFailure(e);
    }

    // delegate methods

    public void addListener(ActionListener<T> tActionListener) {
        future.addListener(tActionListener);
    }

    @Override public void addListener(Runnable listener) {
        future.addListener(listener);
    }

    @Override public T actionGet() throws ElasticSearchException {
        return future.actionGet();
    }

    @Override public T actionGet(String timeout) throws ElasticSearchException {
        return future.actionGet(timeout);
    }

    @Override public T actionGet(long timeoutMillis) throws ElasticSearchException {
        return future.actionGet(timeoutMillis);
    }

    @Override public T actionGet(long timeout, TimeUnit unit) throws ElasticSearchException {
        return future.actionGet(timeout, unit);
    }

    @Override public T actionGet(TimeValue timeout) throws ElasticSearchException {
        return future.actionGet(timeout);
    }

    @Override public boolean cancel(boolean mayInterruptIfRunning) {
        return future.cancel(mayInterruptIfRunning);
    }

    @Override public boolean isCancelled() {
        return future.isCancelled();
    }

    @Override public boolean isDone() {
        return future.isDone();
    }

    @Override public T get() throws InterruptedException, ExecutionException {
        return future.get();
    }

    @Override public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        return future.get(timeout, unit);
    }

    @Override public Throwable getRootFailure() {
        return future.getRootFailure();
    }
}
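
A minimal sketch of the closure hooks GActionFuture exposes, mirroring how the removed tests further down this diff use them; the index call and field names are illustrative, not part of the removed sources:

    // hypothetical example: async callbacks via Groovy property syntax
    def f = node.client.index {
        index 'test'
        type 'type1'
        id '1'
        source { msg = 'hi' }
    }
    f.success = { resp -> println "indexed ${resp.id}" }   // calls setSuccess(Closure)
    f.failure = { t -> t.printStackTrace() }               // calls setFailure(Closure)
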
@@ -1,189 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.common.xcontent

import org.elasticsearch.common.xcontent.XContentBuilder
import org.elasticsearch.common.xcontent.XContentFactory
import org.elasticsearch.common.xcontent.XContentType

/**
 * Used to build JSON data.
 *
 * @author Marc Palmer
 * @author Graeme Rocher
 *
 * @since 1.2
 */
class GXContentBuilder {

    static NODE_ELEMENT = 'element'

    static int rootResolveStrategy = Closure.OWNER_FIRST // the default in Closure

    def root

    def current

    def nestingStack = []

    def build(Closure c) {
        return buildRoot(c)
    }

    String buildAsString(Closure c) {
        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)
        def json = build(c)
        builder.map(json)
        return builder.string()
    }

    byte[] buildAsBytes(Closure c) {
        return buildAsBytes(c, XContentType.JSON)
    }

    byte[] buildAsBytes(Closure c, XContentType contentType) {
        XContentBuilder builder = XContentFactory.contentBuilder(contentType)
        def json = build(c)
        builder.map(json)
        return builder.copiedBytes()
    }

    private buildRoot(Closure c) {
        c.delegate = this
        c.resolveStrategy = rootResolveStrategy
        root = [:]
        current = root
        def returnValue = c.call()
        if (!root) {
            return returnValue
        }
        return root
    }

    def invokeMethod(String methodName) {
        current[methodName] = []
    }

    List array(Closure c) {
        def prev = current
        def list = []
        try {
            current = list
            c.call(list)
        }
        finally {
            current = prev
        }
        return list
    }

    def invokeMethod(String methodName, Object args) {
        if (args.size()) {
            if (args[0] instanceof Map) {
                // switch root to an array if elements used at top level
                if ((current == root) && (methodName == NODE_ELEMENT) && !(root instanceof List)) {
                    if (root.size()) {
                        throw new IllegalArgumentException('Cannot have array elements in root node if properties of root have already been set')
                    } else {
                        root = []
                        current = root
                    }
                }
                def n = [:]
                if (current instanceof List) {
                    current << n
                } else {
                    current[methodName] = n
                }
                n.putAll(args[0])
            } else if (args[-1] instanceof Closure) {
                final Object callable = args[-1]
                handleClosureNode(methodName, callable)
            } else if (args.size() == 1) {
                if (methodName != NODE_ELEMENT) {
                    throw new IllegalArgumentException('Array elements must be defined with the "element" method call eg: element(value)')
                }
                // switch root to an array if elements used at top level
                if (current == root) {
                    if (root.size() && methodName != NODE_ELEMENT) {
                        throw new IllegalArgumentException('Cannot have array elements in root node if properties of root have already been set')
                    } else if (!(root instanceof List)) {
                        root = []
                        current = root
                    }
                }
                if (current instanceof List) {
                    current << args[0]
                } else {
                    throw new IllegalArgumentException('Array elements can only be defined under "array" nodes')
                }
            } else {
                throw new IllegalArgumentException("This builder does not support invocation of [$methodName] with arg list ${args.dump()}")
            }
        } else {
            current[methodName] = []
        }
    }

    private handleClosureNode(String methodName, callable) {
        def n = [:]
        nestingStack << current

        if (current instanceof List) {
            current << n
        }
        else {
            current[methodName] = n
        }
        current = n
        callable.call()
        current = nestingStack.pop()
    }

    void setProperty(String propName, Object value) {
        if (value instanceof Closure) {
            handleClosureNode(propName, value)
        }
        else if (value instanceof List) {
            value = value.collect {
                if (it instanceof Closure) {
                    def callable = it
                    final GXContentBuilder localBuilder = new GXContentBuilder()
                    callable.delegate = localBuilder
                    callable.resolveStrategy = Closure.DELEGATE_FIRST
                    final Map nestedObject = localBuilder.buildRoot(callable)
                    return nestedObject
                }
                else {
                    return it
                }
            }
            current[propName] = value
        }
        else {
            current[propName] = value
        }
    }

    def getProperty(String propName) {
        current[propName]
    }
}
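
A minimal sketch of what the builder above does: closures assigning properties become ordered maps, which are then serialized to JSON. The field names are illustrative; not part of the removed sources (the removed GXContentBuilderTests below exercise the same behavior):

    // hypothetical example
    def json = new GXContentBuilder().buildAsString {
        user = 'kimchy'
        tags = ['a', 'b']
        nested { value = 1 }
    }
    assert json == '{"user":"kimchy","tags":["a","b"],"nested":{"value":1}}'
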
@@ -1,70 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.node

import org.elasticsearch.common.settings.Settings
import org.elasticsearch.groovy.client.GClient
import org.elasticsearch.node.Node

/**
 * @author kimchy (shay.banon)
 */
class GNode {

    final Node node

    final GClient client

    GNode(Node node) {
        this.node = node
        this.client = new GClient(node.client())
    }

    /**
     * The settings that were used to create the node.
     */
    Settings getSettings() {
        node.settings()
    }

    /**
     * Starts the node. If the node is already started, this method is a no-op.
     */
    GNode start() {
        node.start()
        this
    }

    /**
     * Stops the node. If the node is already stopped, this method is a no-op.
     */
    GNode stop() {
        node.stop()
        this
    }

    /**
     * Closes the node (and {@link #stop}s it if it's running).
     */
    GNode close() {
        node.close()
        this
    }
}
@@ -1,69 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.node

import org.elasticsearch.common.settings.ImmutableSettings
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.common.settings.loader.JsonSettingsLoader
import org.elasticsearch.groovy.common.xcontent.GXContentBuilder
import org.elasticsearch.node.Node
import org.elasticsearch.node.internal.InternalNode

/**
 * The node builder allows building a {@link GNode} instance.
 *
 * @author kimchy (shay.banon)
 */
class GNodeBuilder {

    private final ImmutableSettings.Builder settingsBuilder = ImmutableSettings.settingsBuilder()

    private boolean loadConfigSettings = true

    static GNodeBuilder nodeBuilder() {
        new GNodeBuilder()
    }

    ImmutableSettings.Builder getSettings() {
        settingsBuilder
    }

    ImmutableSettings.Builder settings(Settings.Builder settings) {
        settingsBuilder.put(settings.build())
    }

    ImmutableSettings.Builder settings(Settings settings) {
        settingsBuilder.put(settings)
    }

    ImmutableSettings.Builder settings(Closure settings) {
        byte[] settingsBytes = new GXContentBuilder().buildAsBytes(settings)
        settingsBuilder.put(new JsonSettingsLoader().load(settingsBytes))
    }

    GNode build() {
        Node node = new InternalNode(settingsBuilder.build(), loadConfigSettings)
        new GNode(node)
    }

    GNode node() {
        build().start()
    }
}
@@ -1 +0,0 @@
plugin=org.elasticsearch.plugin.groovy.GroovyPlugin
@@ -1,45 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.groovy;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;

/**
 * @author kimchy (shay.banon)
 */
public class GroovyPlugin extends AbstractPlugin {

    @Override public String name() {
        return "lang-groovy";
    }

    @Override public String description() {
        return "Groovy plugin allowing to add groovy scripting support";
    }

    @Override public void processModule(Module module) {
        if (module instanceof ScriptModule) {
            ((ScriptModule) module).addScriptEngine(GroovyScriptEngineService.class);
        }
    }
}
@@ -1,201 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.groovy;

import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.Script;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;

import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

/**
 */
public class GroovyScriptEngineService extends AbstractComponent implements ScriptEngineService {

    private final AtomicLong counter = new AtomicLong();

    private final GroovyClassLoader loader;

    @Inject public GroovyScriptEngineService(Settings settings) {
        super(settings);
        this.loader = new GroovyClassLoader(settings.getClassLoader());
    }

    @Override public void close() {
        loader.clearCache();
    }

    @Override public String[] types() {
        return new String[]{"groovy"};
    }

    @Override public String[] extensions() {
        return new String[]{"groovy"};
    }

    @Override public Object compile(String script) {
        return loader.parseClass(script, generateScriptName());
    }

    @SuppressWarnings({"unchecked"})
    @Override public ExecutableScript executable(Object compiledScript, Map<String, Object> vars) {
        try {
            Class scriptClass = (Class) compiledScript;
            Script scriptObject = (Script) scriptClass.newInstance();
            Binding binding = new Binding();
            if (vars != null) {
                binding.getVariables().putAll(vars);
            }
            scriptObject.setBinding(binding);
            return new GroovyExecutableScript(scriptObject);
        } catch (Exception e) {
            throw new ScriptException("failed to build executable script", e);
        }
    }

    @SuppressWarnings({"unchecked"})
    @Override public SearchScript search(Object compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
        try {
            Class scriptClass = (Class) compiledScript;
            Script scriptObject = (Script) scriptClass.newInstance();
            Binding binding = new Binding();
            binding.getVariables().putAll(lookup.asMap());
            if (vars != null) {
                binding.getVariables().putAll(vars);
            }
            scriptObject.setBinding(binding);
            return new GroovySearchScript(scriptObject, lookup);
        } catch (Exception e) {
            throw new ScriptException("failed to build search script", e);
        }
    }

    @Override public Object execute(Object compiledScript, Map<String, Object> vars) {
        try {
            Class scriptClass = (Class) compiledScript;
            Script scriptObject = (Script) scriptClass.newInstance();
            Binding binding = new Binding(vars);
            scriptObject.setBinding(binding);
            return scriptObject.run();
        } catch (Exception e) {
            throw new ScriptException("failed to execute script", e);
        }
    }

    @Override public Object unwrap(Object value) {
        return value;
    }

    private String generateScriptName() {
        return "Script" + counter.incrementAndGet() + ".groovy";
    }

    public static class GroovyExecutableScript implements ExecutableScript {

        private final Script script;

        public GroovyExecutableScript(Script script) {
            this.script = script;
        }

        @SuppressWarnings({"unchecked"})
        @Override public void setNextVar(String name, Object value) {
            script.getBinding().getVariables().put(name, value);
        }

        @Override public Object run() {
            return script.run();
        }

        @Override public Object unwrap(Object value) {
            return value;
        }
    }

    public static class GroovySearchScript implements SearchScript {

        private final Script script;

        private final SearchLookup lookup;

        public GroovySearchScript(Script script, SearchLookup lookup) {
            this.script = script;
            this.lookup = lookup;
        }

        @Override public void setScorer(Scorer scorer) {
            lookup.setScorer(scorer);
        }

        @Override public void setNextReader(IndexReader reader) {
            lookup.setNextReader(reader);
        }

        @Override public void setNextDocId(int doc) {
            lookup.setNextDocId(doc);
        }

        @SuppressWarnings({"unchecked"})
        @Override public void setNextScore(float score) {
            script.getBinding().getVariables().put("_score", score);
        }

        @SuppressWarnings({"unchecked"})
        @Override public void setNextVar(String name, Object value) {
            script.getBinding().getVariables().put(name, value);
        }

        @Override public void setNextSource(Map<String, Object> source) {
            lookup.source().setNextSource(source);
        }

        @Override public Object run() {
            return script.run();
        }

        @Override public float runAsFloat() {
            return ((Number) run()).floatValue();
        }

        @Override public long runAsLong() {
            return ((Number) run()).longValue();
        }

        @Override public double runAsDouble() {
            return ((Number) run()).doubleValue();
        }

        @Override public Object unwrap(Object value) {
            return value;
        }
    }
}
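
A minimal compile/bind/run sketch against the engine above, following the same flow the multi-threaded test at the end of this diff exercises; the script text and variable values are illustrative, not part of the removed sources:

    // hypothetical example
    def se = new GroovyScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS)
    def compiled = se.compile('x + y')                 // parsed once into a Script class
    def script = se.executable(compiled, [x: 1, y: 2]) // fresh instance with its own Binding
    assert ((Number) script.run()).intValue() == 3
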
@@ -1,21 +0,0 @@
log4j.rootLogger=INFO, out
log4j.logger.jgroups=WARN

#log4j.logger.discovery=TRACE
#log4j.logger.cluster.service=TRACE
#log4j.logger.cluster.action.shard=DEBUG
#log4j.logger.indices.cluster=DEBUG
#log4j.logger.index=TRACE
#log4j.logger.index.engine=DEBUG
#log4j.logger.index.shard.service=DEBUG
#log4j.logger.index.shard.recovery=DEBUG
#log4j.logger.index.cache=DEBUG
#log4j.logger.http=TRACE
#log4j.logger.monitor.memory=TRACE
#log4j.logger.monitor.memory=TRACE
#log4j.logger.cluster.action.shard=TRACE
#log4j.logger.index.gateway=TRACE

log4j.appender.out=org.apache.log4j.ConsoleAppender
log4j.appender.out.layout=org.apache.log4j.PatternLayout
log4j.appender.out.layout.ConversionPattern=[%d{ABSOLUTE}][%-5p][%-25c] %m%n
@@ -1,136 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.common.xcontent

/**
 * @author kimchy (shay.banon)
 */
class GXContentBuilderTests extends GroovyTestCase {

    void testSimple() {
        def builder = new GXContentBuilder()

        def result = builder.buildAsString {
            rootprop = 'something'
        }

        assertEquals '{"rootprop":"something"}', result.toString()
    }

    void testArrays() {
        def builder = new GXContentBuilder()

        def result = builder.buildAsString {
            categories = ['a', 'b', 'c']
            rootprop = 'something'
        }

        assertEquals '{"categories":["a","b","c"],"rootprop":"something"}', result.toString()
    }

    void testSubObjects() {
        def builder = new GXContentBuilder()

        def result = builder.buildAsString {
            categories = ['a', 'b', 'c']
            rootprop = 'something'
            test {
                subprop = 10
            }
        }

        assertEquals '{"categories":["a","b","c"],"rootprop":"something","test":{"subprop":10}}', result.toString()
    }

    void testAssignedObjects() {
        def builder = new GXContentBuilder()

        def result = builder.buildAsString {
            categories = ['a', 'b', 'c']
            rootprop = 'something'
            test = {
                subprop = 10
            }
        }

        assertEquals '{"categories":["a","b","c"],"rootprop":"something","test":{"subprop":10}}', result.toString()
    }

    void testNamedArgumentHandling() {
        def builder = new GXContentBuilder()
        def result = builder.buildAsString {
            categories = ['a', 'b', 'c']
            rootprop = 'something'
            test subprop: 10, three: [1, 2, 3]
        }

        assertEquals '{"categories":["a","b","c"],"rootprop":"something","test":{"subprop":10,"three":[1,2,3]}}', result.toString()
    }

    void testArrayOfClosures() {
        def builder = new GXContentBuilder()
        def result = builder.buildAsString {
            foo = [{ bar = 'hello' }]
        }

        assertEquals '{"foo":[{"bar":"hello"}]}', result.toString()
    }

    void testExampleFromReferenceGuide() {
        def builder = new GXContentBuilder()

        def results = ['one', 'two', 'three']

        def result = builder.buildAsString {
            books = results.collect {
                [title: it]
            }
        }

        assertEquals '{"books":[{"title":"one"},{"title":"two"},{"title":"three"}]}', result.toString()

        result = builder.buildAsString {
            books = array {
                for (b in results) {
                    book title: b
                }
            }
        }

        assertEquals '{"books":[{"title":"one"},{"title":"two"},{"title":"three"}]}', result.toString()
    }

    void testAppendToArray() {
        def builder = new GXContentBuilder()

        def results = ['one', 'two', 'three']

        def result = builder.buildAsString {
            books = array { list ->
                for (b in results) {
                    list << [title: b]
                }
            }
        }

        assertEquals '{"books":[{"title":"one"},{"title":"two"},{"title":"three"}]}', result.toString()
    }
}
@@ -1,97 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.test.client

import org.elasticsearch.groovy.node.GNode
import org.elasticsearch.groovy.node.GNodeBuilder
import org.testng.annotations.AfterMethod
import org.testng.annotations.BeforeMethod
import org.testng.annotations.Test
import static org.hamcrest.MatcherAssert.*
import static org.hamcrest.Matchers.*

/**
 * @author kimchy (shay.banon)
 */
class BuilderActionsTests {

    def GNode node

    @BeforeMethod
    protected void setUp() {
        GNodeBuilder nodeBuilder = new GNodeBuilder()
        nodeBuilder.settings {
            node {
                local = true
            }
            gateway {
                type = 'none'
            }
        }

        node = nodeBuilder.node()
    }

    @AfterMethod
    protected void tearDown() {
        node.close()
    }

    @Test
    void testSimpleOperations() {
        def indexR = node.client.prepareIndex('test', 'type1', '1').setSource({
            test = 'value'
            complex {
                value1 = 'value1'
                value2 = 'value2'
            }
        }).gexecute()

        assertThat indexR.response.index, equalTo('test')
        assertThat indexR.response.type, equalTo('type1')
        assertThat indexR.response.id, equalTo('1')

        node.client.admin.indices.refresh {}.actionGet()

        def countR = node.client.prepareCount('test').setQuery({
            term(test: 'value')
        }).gexecute()

        assertThat countR.response.count, equalTo(1L)

        def searchR = node.client.prepareSearch('test').setQuery({
            term(test: 'value')
        }).gexecute()

        assertThat searchR.response.hits.totalHits, equalTo(1L)

        def delete = node.client.prepareDelete('test', 'type1', '1').gexecute()
        assertThat delete.response.index, equalTo('test')
        assertThat delete.response.type, equalTo('type1')
        assertThat delete.response.id, equalTo('1')

        def refresh = node.client.admin.indices.refresh {}
        assertThat refresh.response.failedShards, equalTo(0)

        def get = node.client.prepareGet('test', 'type1', '1').gexecute()
        assertThat get.response.exists, equalTo(false)
    }
}
@@ -1,156 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.test.client

import java.util.concurrent.CountDownLatch
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.action.index.IndexResponse
import org.elasticsearch.groovy.node.GNode
import org.elasticsearch.groovy.node.GNodeBuilder
import org.testng.annotations.AfterMethod
import org.testng.annotations.BeforeMethod
import org.testng.annotations.Test
import static org.elasticsearch.client.Requests.*
import static org.hamcrest.MatcherAssert.*
import static org.hamcrest.Matchers.*

/**
 * @author kimchy (shay.banon)
 */
class DifferentApiExecutionTests {

    def GNode node

    @BeforeMethod
    protected void setUp() {
        GNodeBuilder nodeBuilder = new GNodeBuilder()
        nodeBuilder.settings {
            node {
                local = true
            }
            gateway {
                type = 'none'
            }
        }

        node = nodeBuilder.node()
    }

    @AfterMethod
    protected void tearDown() {
        node.close()
    }

    @Test
    void verifyDifferentApiExecutions() {
        def response = node.client.index(new IndexRequest(
                index: 'test',
                type: 'type1',
                id: '1',
                source: {
                    test = 'value'
                    complex {
                        value1 = 'value1'
                        value2 = 'value2'
                    }
                })).response
        assertThat response.index, equalTo('test')
        assertThat response.type, equalTo('type1')
        assertThat response.id, equalTo('1')

        def refresh = node.client.admin.indices.refresh {}
        assertThat refresh.response.failedShards, equalTo(0)

        def getR = node.client.get {
            index 'test'
            type 'type1'
            id '1'
        }
        assertThat getR.response.exists, equalTo(true)
        assertThat getR.response.index, equalTo('test')
        assertThat getR.response.type, equalTo('type1')
        assertThat getR.response.id, equalTo('1')
        assertThat getR.response.sourceAsString(), equalTo('{"test":"value","complex":{"value1":"value1","value2":"value2"}}')
        assertThat getR.response.source.test, equalTo('value')
        assertThat getR.response.source.complex.value1, equalTo('value1')

        response = node.client.index({
            index = 'test'
            type = 'type1'
            id = '1'
            source = {
                test = 'value'
                complex {
                    value1 = 'value1'
                    value2 = 'value2'
                }
            }
        }).response
        assertThat response.index, equalTo('test')
        assertThat response.type, equalTo('type1')
        assertThat response.id, equalTo('1')

        def indexR = node.client.index(indexRequest().with {
            index 'test'
            type 'type1'
            id '1'
            source {
                test = 'value'
                complex {
                    value1 = 'value1'
                    value2 = 'value2'
                }
            }
        })
        CountDownLatch latch = new CountDownLatch(1)
        indexR.success = { IndexResponse responseX ->
            assertThat responseX.index, equalTo('test')
            assertThat indexR.response.index, equalTo('test')
            assertThat responseX.type, equalTo('type1')
            assertThat indexR.response.type, equalTo('type1')
            assertThat responseX.id, equalTo('1')
            assertThat indexR.response.id, equalTo('1')
            latch.countDown()
        }
        latch.await()

        indexR = node.client.index {
            index 'test'
            type 'type1'
            id '1'
            source {
                test = 'value'
                complex {
                    value1 = 'value1'
                    value2 = 'value2'
                }
            }
        }
        latch = new CountDownLatch(1)
        indexR.listener = {
            assertThat indexR.response.index, equalTo('test')
            assertThat indexR.response.type, equalTo('type1')
            assertThat indexR.response.id, equalTo('1')
            latch.countDown()
        }
        latch.await()
    }
}
@@ -1,161 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.test.client

import org.elasticsearch.groovy.node.GNode
import org.elasticsearch.groovy.node.GNodeBuilder
import org.testng.annotations.AfterMethod
import org.testng.annotations.BeforeMethod
import org.testng.annotations.Test
import static org.hamcrest.MatcherAssert.*
import static org.hamcrest.Matchers.*

/**
 * @author kimchy (shay.banon)
 */
class SimpleActionsTests {

    def GNode node

    @BeforeMethod
    protected void setUp() {
        GNodeBuilder nodeBuilder = new GNodeBuilder()
        nodeBuilder.settings {
            node {
                local = true
            }
            gateway {
                type = 'none'
            }
        }

        node = nodeBuilder.node()
    }

    @AfterMethod
    protected void tearDown() {
        node.close()
    }

    @Test
    void testSimpleOperations() {
        def value1 = new org.elasticsearch.groovy.common.xcontent.GXContentBuilder().buildAsString {
            something = 'test'
        }
        println value1

        def indexR = node.client.index {
            index 'test'
            type 'type1'
            id '1'
            source {
                test = 'value'
                complex {
                    value1 = 'value1'
                    value2 = 'value2'
                }
            }
        }
        assertThat indexR.response.index, equalTo('test')
        assertThat indexR.response.type, equalTo('type1')
        assertThat indexR.response.id, equalTo('1')

        def delete = node.client.delete {
            index 'test'
            type 'type1'
            id '1'
        }
        assertThat delete.response.index, equalTo('test')
        assertThat delete.response.type, equalTo('type1')
        assertThat delete.response.id, equalTo('1')

        def refresh = node.client.admin.indices.refresh {}
        assertThat refresh.response.failedShards, equalTo(0)

        def get = node.client.get {
            index 'test'
            type 'type1'
            id '1'
        }
        assertThat get.response.exists, equalTo(false)

        indexR = node.client.index {
            index 'test'
            type 'type1'
            id '1'
            source {
                test = 'value'
                complex {
                    value1 = 'value1'
                    value2 = 'value2'
                }
            }
        }
        assertThat indexR.response.index, equalTo('test')
        assertThat indexR.response.type, equalTo('type1')
        assertThat indexR.response.id, equalTo('1')

        refresh = node.client.admin.indices.refresh {}
        assertThat refresh.response.failedShards, equalTo(0)

        def count = node.client.count {
            indices 'test'
            types 'type1'
            query {
                term {
                    test = 'value'
                }
            }
        }
        assertThat count.response.failedShards, equalTo(0)
        assertThat count.response.count, equalTo(1L)

        def search = node.client.search {
            indices 'test'
            types 'type1'
            source {
                query {
                    term(test: 'value')
                }
            }
        }
        assertThat search.response.failedShards, equalTo(0)
        assertThat search.response.hits.totalHits, equalTo(1L)
        assertThat search.response.hits[0].source.test, equalTo('value')

        def deleteByQuery = node.client.deleteByQuery {
            indices 'test'
            query {
                term(test: 'value')
            }
        }
        assertThat deleteByQuery.response.indices.test.failedShards, equalTo(0)

        refresh = node.client.admin.indices.refresh {}
        assertThat refresh.response.failedShards, equalTo(0)

        get = node.client.get {
            index 'test'
            type 'type1'
            id '1'
        }
        assertThat get.response.exists, equalTo(false)
    }
}
@@ -1,44 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.groovy.test.node

import org.elasticsearch.groovy.node.GNode
import org.elasticsearch.groovy.node.GNodeBuilder
import static org.elasticsearch.groovy.node.GNodeBuilder.*

/**
 * @author kimchy (shay.banon)
 */
class GNodeBuilderTests extends GroovyTestCase {

    void testGNodeBuilder() {
        GNodeBuilder nodeBuilder = nodeBuilder()
        nodeBuilder.settings {
            node {
                local = true
            }
            cluster {
                name = 'test'
            }
        }
        GNode node = nodeBuilder.node()
        node.stop().close()
    }
}
@@ -1,36 +0,0 @@
//@Grapes([
//    @Grab(group = 'org.elasticsearch', module = 'elasticsearch-groovy', version = '0.7.0-SNAPSHOT'),
//    @Grab(group = 'org.slf4j', module = 'slf4j-simple', version = '1.5.8')
///*    @Grab(group = 'org.slf4j', module = 'slf4j-log4j12', version = '1.5.8')*/
//])

def startNode() {
    def nodeBuilder = new org.elasticsearch.groovy.node.GNodeBuilder()
    nodeBuilder.settings {
        node {
            client = true
        }
    }
    nodeBuilder.node()
}

def node = startNode()

println "settings $node.settings.asMap"

println "Node started"

future = node.client.index {
    index "twitter"
    type "tweet"
    id "1"
    source {
        user = "kimchy"
        message = "this is a tweet"
    }
}

println "Indexed $future.response.index/$future.response.type/$future.response.id"

node.close()
@@ -1,168 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.groovy;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.jsr166y.ThreadLocalRandom;
import org.elasticsearch.script.ExecutableScript;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class GroovyScriptMultiThreadedTest {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    @Test public void testExecutableNoRuntimeParams() throws Exception {
        final GroovyScriptEngineService se = new GroovyScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        long x = ThreadLocalRandom.current().nextInt();
                        long y = ThreadLocalRandom.current().nextInt();
                        long addition = x + y;
                        Map<String, Object> vars = new HashMap<String, Object>();
                        vars.put("x", x);
                        vars.put("y", y);
                        ExecutableScript script = se.executable(compiled, vars);
                        for (int i = 0; i < 100000; i++) {
                            long result = ((Number) script.run()).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }


    @Test public void testExecutableWithRuntimeParams() throws Exception {
        final GroovyScriptEngineService se = new GroovyScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        long x = ThreadLocalRandom.current().nextInt();
                        Map<String, Object> vars = new HashMap<String, Object>();
                        vars.put("x", x);
                        ExecutableScript script = se.executable(compiled, vars);
                        for (int i = 0; i < 100000; i++) {
                            long y = ThreadLocalRandom.current().nextInt();
                            long addition = x + y;
                            script.setNextVar("y", y);
                            long result = ((Number) script.run()).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }

    @Test public void testExecute() throws Exception {
        final GroovyScriptEngineService se = new GroovyScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        Map<String, Object> runtimeVars = new HashMap<String, Object>();
                        for (int i = 0; i < 100000; i++) {
                            long x = ThreadLocalRandom.current().nextInt();
                            long y = ThreadLocalRandom.current().nextInt();
                            long addition = x + y;
                            runtimeVars.put("x", x);
                            runtimeVars.put("y", y);
                            long result = ((Number) se.execute(compiled, runtimeVars)).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }
}
@@ -1,262 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.groovy;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class GroovyScriptSearchTests {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    private Node node;

    private Client client;

    @BeforeMethod public void createNodes() throws Exception {
        node = NodeBuilder.nodeBuilder().settings(ImmutableSettings.settingsBuilder()
                .put("cluster.name", "test-cluster-" + NetworkUtils.getLocalAddress())
                .put("gateway.type", "none")
                .put("number_of_shards", 1)).node();
        client = node.client();
    }

    @AfterMethod public void closeNodes() {
        client.close();
        node.close();
    }

    @Test public void testGroovyScriptFilter() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "2")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "3")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("running doc['num1'].value > 1");
        SearchResponse response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > 1").lang("groovy")))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "groovy", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(1).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("groovy").addParam("param1", 2)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "groovy", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(1l));
        assertThat(response.hits().getAt(0).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("groovy").addParam("param1", -1)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "groovy", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(3l));
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(2).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
    }

    @SuppressWarnings({"unchecked"})
    @Test public void testScriptFieldUsingSource() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject()
                        .startObject("obj1").field("test", "something").endObject()
                        .startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
                        .endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        SearchResponse response = client.prepareSearch()
                .setQuery(matchAllQuery())
                .addField("_source.obj1") // we also automatically detect _source in fields
                .addScriptField("s_obj1", "groovy", "_source.obj1", null)
                .addScriptField("s_obj1_test", "groovy", "_source.obj1.test", null)
                .addScriptField("s_obj2", "groovy", "_source.obj2", null)
                .addScriptField("s_obj2_arr2", "groovy", "_source.obj2.arr2", null)
                .execute().actionGet();

        Map<String, Object> sObj1 = (Map<String, Object>) response.hits().getAt(0).field("_source.obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        sObj1 = (Map<String, Object>) response.hits().getAt(0).field("s_obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        Map<String, Object> sObj2 = (Map<String, Object>) response.hits().getAt(0).field("s_obj2").value();
        List sObj2Arr2 = (List) sObj2.get("arr2");
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));

        sObj2Arr2 = (List) response.hits().getAt(0).field("s_obj2_arr2").value();
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
    }

    @Test public void testCustomScriptBoost() throws Exception {
        // execute a search before we create an index
        try {
            client.prepareSearch().setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        try {
            client.prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        client.admin().indices().create(createIndexRequest("test")).actionGet();
        client.index(indexRequest("test").type("type1").id("1")
                .source(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())).actionGet();
        client.index(indexRequest("test").type("type1").id("2")
                .source(jsonBuilder().startObject().field("test", "value check").field("num1", 2.0f).endObject())).actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("--- QUERY_THEN_FETCH");

        logger.info("running doc['num1'].value");
        SearchResponse response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value").lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running -doc['num1'].value");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("-doc['num1'].value").lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));


        logger.info("running pow(doc['num1'].value, 2)");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("Math.pow(doc['num1'].value, 2)").lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running max(doc['num1'].value, 1)");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("Math.max(doc['num1'].value, 1d)").lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running doc['num1'].value * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value * _score").lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running param1 * param2 * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("param1 * param2 * _score").param("param1", 2).param("param2", 2).lang("groovy")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
    }
}
@@ -1,61 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.groovy;

import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;

import java.util.HashMap;
import java.util.Map;

/**
 * @author kimchy (shay.banon)
 */
public class SimpleBench {

    public static void main(String[] args) {
        GroovyScriptEngineService se = new GroovyScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        Object compiled = se.compile("x + y");

        Map<String, Object> vars = new HashMap<String, Object>();
        // warm up
        for (int i = 0; i < 1000; i++) {
            vars.put("x", i);
            vars.put("y", i + 1);
            se.execute(compiled, vars);
        }

        final long ITER = 100000;

        StopWatch stopWatch = new StopWatch().start();
        for (long i = 0; i < ITER; i++) {
            se.execute(compiled, vars);
        }
        System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime());

        stopWatch = new StopWatch().start();
        ExecutableScript executableScript = se.executable(compiled, vars);
        for (long i = 0; i < ITER; i++) {
            executableScript.run();
        }
        System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime());
    }
}
@@ -1,138 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'

archivesBaseName = "elasticsearch-lang-javascript"

explodedDistDir = new File(distsDir, 'exploded')

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'

// add the source files to the dist jar
//jar {
//    from sourceSets.main.allSource
//}

configurations {
    dists
    distLib {
        visible = false
        transitive = false
    }
}

dependencies {
    compile project(':elasticsearch')

    compile('org.mozilla:rhino:1.7R3')
    distLib('org.mozilla:rhino:1.7R3') { transitive = false }
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch files (compile above adds the elasticsearch one)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

jar {
    // from sourceSets.main.allJava
    manifest {
        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
    }
}


artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-lang-javascript'
            description 'JavaScript Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured {pom ->
            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
        }
    }
}

eclipseClasspath {
    defaultOutputDir = file('build/eclipse-build')
}
@@ -1 +0,0 @@
plugin=org.elasticsearch.plugin.javascript.JavaScriptPlugin
@@ -1,45 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.javascript;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.javascript.JavaScriptScriptEngineService;

/**
 * @author kimchy (shay.banon)
 */
public class JavaScriptPlugin extends AbstractPlugin {

    @Override public String name() {
        return "lang-javascript";
    }

    @Override public String description() {
        return "JavaScript plugin allowing to add javascript scripting support";
    }

    @Override public void processModule(Module module) {
        if (module instanceof ScriptModule) {
            ((ScriptModule) module).addScriptEngine(JavaScriptScriptEngineService.class);
        }
    }
}
@@ -1,279 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.script.javascript.support.NativeList;
import org.elasticsearch.script.javascript.support.NativeMap;
import org.elasticsearch.script.javascript.support.ScriptValueConverter;
import org.elasticsearch.search.lookup.SearchLookup;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Script;
import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.ScriptableObject;
import org.mozilla.javascript.WrapFactory;

import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @author kimchy (shay.banon)
 */
public class JavaScriptScriptEngineService extends AbstractComponent implements ScriptEngineService {

    private final AtomicLong counter = new AtomicLong();

    private static WrapFactory wrapFactory = new CustomWrapFactory();

    private final int optimizationLevel;

    private Scriptable globalScope;

    @Inject public JavaScriptScriptEngineService(Settings settings) {
        super(settings);

        this.optimizationLevel = componentSettings.getAsInt("optimization_level", 1);

        Context ctx = Context.enter();
        try {
            ctx.setWrapFactory(wrapFactory);
            globalScope = ctx.initStandardObjects(null, true);
        } finally {
            Context.exit();
        }
    }

    @Override public void close() {

    }

    @Override public String[] types() {
        return new String[]{"js", "javascript"};
    }

    @Override public String[] extensions() {
        return new String[]{"js"};
    }

    @Override public Object compile(String script) {
        Context ctx = Context.enter();
        try {
            ctx.setWrapFactory(wrapFactory);
            ctx.setOptimizationLevel(optimizationLevel);
            return ctx.compileString(script, generateScriptName(), 1, null);
        } finally {
            Context.exit();
        }
    }

    @Override public ExecutableScript executable(Object compiledScript, Map<String, Object> vars) {
        Context ctx = Context.enter();
        try {
            ctx.setWrapFactory(wrapFactory);

            Scriptable scope = ctx.newObject(globalScope);
            scope.setPrototype(globalScope);
            scope.setParentScope(null);
            for (Map.Entry<String, Object> entry : vars.entrySet()) {
                ScriptableObject.putProperty(scope, entry.getKey(), entry.getValue());
            }

            return new JavaScriptExecutableScript((Script) compiledScript, scope);
        } finally {
            Context.exit();
        }
    }

    @Override public SearchScript search(Object compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
        Context ctx = Context.enter();
        try {
            ctx.setWrapFactory(wrapFactory);

            Scriptable scope = ctx.newObject(globalScope);
            scope.setPrototype(globalScope);
            scope.setParentScope(null);

            for (Map.Entry<String, Object> entry : lookup.asMap().entrySet()) {
                ScriptableObject.putProperty(scope, entry.getKey(), entry.getValue());
            }

            if (vars != null) {
                for (Map.Entry<String, Object> entry : vars.entrySet()) {
                    ScriptableObject.putProperty(scope, entry.getKey(), entry.getValue());
                }
            }

            return new JavaScriptSearchScript((Script) compiledScript, scope, lookup);
        } finally {
            Context.exit();
        }
    }

    @Override public Object execute(Object compiledScript, Map<String, Object> vars) {
        Context ctx = Context.enter();
        ctx.setWrapFactory(wrapFactory);
        try {
            Script script = (Script) compiledScript;
            Scriptable scope = ctx.newObject(globalScope);
            scope.setPrototype(globalScope);
            scope.setParentScope(null);

            for (Map.Entry<String, Object> entry : vars.entrySet()) {
                ScriptableObject.putProperty(scope, entry.getKey(), entry.getValue());
            }
            Object ret = script.exec(ctx, scope);
            return ScriptValueConverter.unwrapValue(ret);
        } finally {
            Context.exit();
        }
    }

    @Override public Object unwrap(Object value) {
        return ScriptValueConverter.unwrapValue(value);
    }

    private String generateScriptName() {
        return "Script" + counter.incrementAndGet() + ".js";
    }

    public static class JavaScriptExecutableScript implements ExecutableScript {

        private final Script script;

        private final Scriptable scope;

        public JavaScriptExecutableScript(Script script, Scriptable scope) {
            this.script = script;
            this.scope = scope;
        }

        @Override public Object run() {
            Context ctx = Context.enter();
            try {
                ctx.setWrapFactory(wrapFactory);
                return ScriptValueConverter.unwrapValue(script.exec(ctx, scope));
            } finally {
                Context.exit();
            }
        }

        @Override public void setNextVar(String name, Object value) {
            ScriptableObject.putProperty(scope, name, value);
        }

        @Override public Object unwrap(Object value) {
            return ScriptValueConverter.unwrapValue(value);
        }
    }

    public static class JavaScriptSearchScript implements SearchScript {

        private final Script script;

        private final Scriptable scope;

        private final SearchLookup lookup;

        public JavaScriptSearchScript(Script script, Scriptable scope, SearchLookup lookup) {
            this.script = script;
            this.scope = scope;
            this.lookup = lookup;
        }

        @Override public void setScorer(Scorer scorer) {
            lookup.setScorer(scorer);
        }

        @Override public void setNextReader(IndexReader reader) {
            lookup.setNextReader(reader);
        }

        @Override public void setNextDocId(int doc) {
            lookup.setNextDocId(doc);
        }

        @Override public void setNextScore(float score) {
            ScriptableObject.putProperty(scope, "_score", score);
        }

        @Override public void setNextVar(String name, Object value) {
            ScriptableObject.putProperty(scope, name, value);
        }

        @Override public void setNextSource(Map<String, Object> source) {
            lookup.source().setNextSource(source);
        }

        @Override public Object run() {
            Context ctx = Context.enter();
            try {
                ctx.setWrapFactory(wrapFactory);
                return ScriptValueConverter.unwrapValue(script.exec(ctx, scope));
            } finally {
                Context.exit();
            }
        }

        @Override public float runAsFloat() {
            return ((Number) run()).floatValue();
        }

        @Override public long runAsLong() {
            return ((Number) run()).longValue();
        }

        @Override public double runAsDouble() {
            return ((Number) run()).doubleValue();
        }

        @Override public Object unwrap(Object value) {
            return ScriptValueConverter.unwrapValue(value);
        }
    }

    /**
     * Wrap Factory for Rhino Script Engine
     */
    public static class CustomWrapFactory extends WrapFactory {

        public CustomWrapFactory() {
            setJavaPrimitiveWrap(false); // RingoJS does that..., claims its annoying...
        }

        public Scriptable wrapAsJavaObject(Context cx, Scriptable scope, Object javaObject, Class staticType) {
            if (javaObject instanceof Map) {
                return new NativeMap(scope, (Map) javaObject);
            }
            if (javaObject instanceof List) {
                return new NativeList(scope, (List) javaObject);
            }
            return super.wrapAsJavaObject(cx, scope, javaObject, staticType);
        }
    }
}
@@ -1,207 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript.support;

import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.Undefined;
import org.mozilla.javascript.Wrapper;

import java.util.List;

/**
 * @author kimchy (shay.banon)
 */
public class NativeList implements Scriptable, Wrapper {
    private static final long serialVersionUID = 3664761893203964569L;

    private List<Object> list;
    private Scriptable parentScope;
    private Scriptable prototype;


    public static NativeList wrap(Scriptable scope, List<Object> list) {
        return new NativeList(scope, list);
    }

    public NativeList(Scriptable scope, List<Object> list) {
        this.parentScope = scope;
        this.list = list;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Wrapper#unwrap()
     */

    public Object unwrap() {
        return list;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getClassName()
     */

    public String getClassName() {
        return "NativeList";
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(java.lang.String, org.mozilla.javascript.Scriptable)
     */

    public Object get(String name, Scriptable start) {
        if ("length".equals(name)) {
            return list.size();
        } else {
            return Undefined.instance;
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(int, org.mozilla.javascript.Scriptable)
     */

    public Object get(int index, Scriptable start) {
        if (index < 0 || index >= list.size()) {
            return Undefined.instance;
        }
        return list.get(index);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(java.lang.String, org.mozilla.javascript.Scriptable)
     */

    public boolean has(String name, Scriptable start) {
        if ("length".equals(name)) {
            return true;
        }
        return false;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(int, org.mozilla.javascript.Scriptable)
     */

    public boolean has(int index, Scriptable start) {
        return index >= 0 && index < list.size();
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(java.lang.String, org.mozilla.javascript.Scriptable, java.lang.Object)
     */

    @SuppressWarnings("unchecked")
    public void put(String name, Scriptable start, Object value) {
        // do nothing here...
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(int, org.mozilla.javascript.Scriptable, java.lang.Object)
     */

    public void put(int index, Scriptable start, Object value) {
        if (index == list.size()) {
            list.add(value);
        } else {
            list.set(index, value);
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
     */

    public void delete(String name) {
        // nothing here
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(int)
     */

    public void delete(int index) {
        list.remove(index);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getPrototype()
     */

    public Scriptable getPrototype() {
        return this.prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setPrototype(org.mozilla.javascript.Scriptable)
     */

    public void setPrototype(Scriptable prototype) {
        this.prototype = prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getParentScope()
     */

    public Scriptable getParentScope() {
        return this.parentScope;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setParentScope(org.mozilla.javascript.Scriptable)
     */

    public void setParentScope(Scriptable parent) {
        this.parentScope = parent;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getIds()
     */

    public Object[] getIds() {
        int size = list.size();
        Object[] ids = new Object[size];
        for (int i = 0; i < size; ++i) {
            ids[i] = i;
        }
        return ids;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getDefaultValue(java.lang.Class)
     */

    public Object getDefaultValue(Class hint) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#hasInstance(org.mozilla.javascript.Scriptable)
     */

    public boolean hasInstance(Scriptable value) {
        if (!(value instanceof Wrapper))
            return false;
        Object instance = ((Wrapper) value).unwrap();
        return List.class.isInstance(instance);
    }

}
@@ -1,223 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript.support;

import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.Wrapper;

import java.util.Iterator;
import java.util.Map;

/**
 * Wrapper for exposing maps in Rhino scripts.
 *
 * @author kimchy (shay.banon)
 */
public class NativeMap implements Scriptable, Wrapper {
    private static final long serialVersionUID = 3664761893203964569L;

    private Map<Object, Object> map;
    private Scriptable parentScope;
    private Scriptable prototype;


    /**
     * Construct
     *
     * @param scope
     * @param map
     * @return native map
     */
    public static NativeMap wrap(Scriptable scope, Map<Object, Object> map) {
        return new NativeMap(scope, map);
    }

    /**
     * Construct
     *
     * @param scope
     * @param map
     */
    public NativeMap(Scriptable scope, Map<Object, Object> map) {
        this.parentScope = scope;
        this.map = map;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Wrapper#unwrap()
     */

    public Object unwrap() {
        return map;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getClassName()
     */

    public String getClassName() {
        return "NativeMap";
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(java.lang.String, org.mozilla.javascript.Scriptable)
     */

    public Object get(String name, Scriptable start) {
        // get the property from the underlying QName map
        if ("length".equals(name)) {
            return map.size();
        } else {
            return map.get(name);
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(int, org.mozilla.javascript.Scriptable)
     */

    public Object get(int index, Scriptable start) {
        Object value = null;
        int i = 0;
        Iterator itrValues = map.values().iterator();
        while (i++ <= index && itrValues.hasNext()) {
            value = itrValues.next();
        }
        return value;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(java.lang.String, org.mozilla.javascript.Scriptable)
     */

    public boolean has(String name, Scriptable start) {
        // locate the property in the underlying map
        return map.containsKey(name);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(int, org.mozilla.javascript.Scriptable)
     */

    public boolean has(int index, Scriptable start) {
        return (index >= 0 && map.values().size() > index);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(java.lang.String, org.mozilla.javascript.Scriptable, java.lang.Object)
     */

    @SuppressWarnings("unchecked")
    public void put(String name, Scriptable start, Object value) {
        map.put(name, value);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(int, org.mozilla.javascript.Scriptable, java.lang.Object)
     */

    public void put(int index, Scriptable start, Object value) {
        // TODO: implement?
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
     */

    public void delete(String name) {
        map.remove(name);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(int)
     */

    public void delete(int index) {
        int i = 0;
        Iterator itrKeys = map.keySet().iterator();
        while (i <= index && itrKeys.hasNext()) {
            Object key = itrKeys.next();
            if (i == index) {
                map.remove(key);
                break;
            }
            i++; // advance the positional index; without this the loop never reaches index > 0
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getPrototype()
     */

    public Scriptable getPrototype() {
        return this.prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setPrototype(org.mozilla.javascript.Scriptable)
     */

    public void setPrototype(Scriptable prototype) {
        this.prototype = prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getParentScope()
     */

    public Scriptable getParentScope() {
        return this.parentScope;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setParentScope(org.mozilla.javascript.Scriptable)
     */

    public void setParentScope(Scriptable parent) {
        this.parentScope = parent;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getIds()
     */

    public Object[] getIds() {
        return map.keySet().toArray();
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getDefaultValue(java.lang.Class)
     */

    public Object getDefaultValue(Class hint) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#hasInstance(org.mozilla.javascript.Scriptable)
     */

    public boolean hasInstance(Scriptable value) {
        if (!(value instanceof Wrapper))
            return false;
        Object instance = ((Wrapper) value).unwrap();
        return Map.class.isInstance(instance);
    }

}
@ -1,183 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.script.javascript.support;
|
||||
|
||||
import org.mozilla.javascript.*;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Value Converter to marshal objects between Java and Javascript.
|
||||
*
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public final class ScriptValueConverter {
|
||||
private static final String TYPE_DATE = "Date";
|
||||
|
||||
|
||||
/**
|
||||
* Private constructor - methods are static
|
||||
*/
|
||||
private ScriptValueConverter() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an object from a script wrapper value to a serializable value valid outside
|
||||
* of the Rhino script processor context.
|
||||
*
|
||||
* This includes converting JavaScript Array objects to Lists of valid objects.
|
||||
*
|
||||
* @param value Value to convert from script wrapper object to external object value.
|
||||
* @return unwrapped and converted value.
|
||||
*/
|
||||
public static Object unwrapValue(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
} else if (value instanceof Wrapper) {
|
||||
// unwrap a Java object from a JavaScript wrapper
|
||||
// recursively call this method to convert the unwrapped value
|
||||
value = unwrapValue(((Wrapper) value).unwrap());
|
||||
} else if (value instanceof IdScriptableObject) {
|
||||
// check for special case Native object wrappers
|
||||
String className = ((IdScriptableObject) value).getClassName();
|
||||
// check for special case of the String object
|
||||
if ("String".equals(className)) {
|
||||
value = Context.jsToJava(value, String.class);
|
||||
}
|
||||
// check for special case of a Date object
|
||||
else if ("Date".equals(className)) {
|
||||
value = Context.jsToJava(value, Date.class);
|
||||
} else {
|
||||
// a scriptable object will probably indicate a multi-value property set
|
||||
// set using a JavaScript associative Array object
|
||||
Scriptable values = (Scriptable) value;
|
||||
Object[] propIds = values.getIds();
|
||||
|
||||
// is it a JavaScript associative Array object using Integer indexes?
|
||||
if (values instanceof NativeArray && isArray(propIds)) {
|
||||
// convert JavaScript array of values to a List of Serializable objects
|
||||
List<Object> propValues = new ArrayList<Object>(propIds.length);
|
||||
for (int i = 0; i < propIds.length; i++) {
|
||||
// work on each key in turn
|
||||
Integer propId = (Integer) propIds[i];
|
||||
|
||||
// we are only interested in keys that indicate a list of values
|
||||
if (propId instanceof Integer) {
|
||||
// get the value out for the specified key
|
||||
Object val = values.get(propId, values);
|
||||
                            // recursively call this method to convert the value
                            propValues.add(unwrapValue(val));
                        }
                    }

                    value = propValues;
                } else {
                    // any other JavaScript object that supports properties - convert to a Map of objects
                    Map<String, Object> propValues = new HashMap<String, Object>(propIds.length);
                    for (int i = 0; i < propIds.length; i++) {
                        // work on each key in turn
                        Object propId = propIds[i];

                        // we are only interested in keys that indicate a list of values
                        if (propId instanceof String) {
                            // get the value out for the specified key
                            Object val = values.get((String) propId, values);
                            // recursively call this method to convert the value
                            propValues.put((String) propId, unwrapValue(val));
                        }
                    }
                    value = propValues;
                }
            }
        } else if (value instanceof Object[]) {
            // convert back to a list of Java values
            Object[] array = (Object[]) value;
            ArrayList<Object> list = new ArrayList<Object>(array.length);
            for (int i = 0; i < array.length; i++) {
                list.add(unwrapValue(array[i]));
            }
            value = list;
        } else if (value instanceof Map) {
            // ensure each value in the Map is unwrapped (it may have been an unwrapped NativeMap!)
            Map<Object, Object> map = (Map<Object, Object>) value;
            Map<Object, Object> copyMap = new HashMap<Object, Object>(map.size());
            for (Object key : map.keySet()) {
                copyMap.put(key, unwrapValue(map.get(key)));
            }
            value = copyMap;
        }
        return value;
    }

    /**
     * Convert an object from any repository serialized value to a valid script object.
     * This includes converting Collection multi-value properties into JavaScript Array objects.
     *
     * @param scope Scripting scope
     * @param value Property value
     * @return Value safe for scripting usage
     */
    public static Object wrapValue(Scriptable scope, Object value) {
        // perform conversions from Java objects to JavaScript scriptable instances
        if (value == null) {
            return null;
        } else if (value instanceof Date) {
            // convert Date to JavaScript native Date object
            // call the "Date" constructor on the root scope object - passing in the millisecond
            // value from the Java date - this will construct a JavaScript Date with the same value
            Date date = (Date) value;
            value = ScriptRuntime.newObject(
                    Context.getCurrentContext(), scope, TYPE_DATE, new Object[]{date.getTime()});
        } else if (value instanceof Collection) {
            // recursively convert each value in the collection
            Collection<Object> collection = (Collection<Object>) value;
            Object[] array = new Object[collection.size()];
            int index = 0;
            for (Object obj : collection) {
                array[index++] = wrapValue(scope, obj);
            }
            // convert the array to a native JavaScript Array
            value = Context.getCurrentContext().newArray(scope, array);
        } else if (value instanceof Map) {
            value = new NativeMap(scope, (Map) value);
        }

        // simple numbers, strings and booleans are wrapped automatically by Rhino

        return value;
    }

    /**
     * Look at the ids of a native array and try to determine whether it's actually an Array or a Hashmap.
     *
     * @param ids ids of the native array
     * @return true if it's an array, false otherwise (i.e. it's a map)
     */
    private static boolean isArray(final Object[] ids) {
        boolean result = true;
        for (int i = 0; i < ids.length; i++) {
            if (ids[i] instanceof Integer == false) {
                result = false;
                break;
            }
        }
        return result;
    }
}
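
Illustrative sketch (not part of this commit): round-tripping a Java Map through the converter above inside a Rhino context. It assumes Rhino and the support classes in this diff are on the classpath, and that NativeMap (referenced above but not shown here) is a Wrapper over the Java map, like the ScriptableWrappedMap further below.

import org.elasticsearch.script.javascript.support.ScriptValueConverter;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Scriptable;

import java.util.HashMap;
import java.util.Map;

public class ConverterRoundTrip {
    public static void main(String[] args) {
        Context ctx = Context.enter();
        try {
            Scriptable scope = ctx.initStandardObjects();
            Map<String, Object> doc = new HashMap<String, Object>();
            doc.put("field", "value");
            // Java Map -> script-visible object (a NativeMap under the covers)
            Object wrapped = ScriptValueConverter.wrapValue(scope, doc);
            // ... a script would read or modify "wrapped" here ...
            // script object -> plain Java value again
            Object unwrapped = ScriptValueConverter.unwrapValue(wrapped);
            System.out.println(unwrapped); // expected: {field=value}
        } finally {
            Context.exit();
        }
    }
}
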
@@ -1,188 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript.support;

import org.mozilla.javascript.Scriptable;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Implementation of a Scriptable Map. This is the best choice for maps that want to represent
 * JavaScript associative arrays - allowing access via key and integer index. It maintains and
 * respects insertion order of the elements and allows either string or integer keys.
 *
 * @author kimchy (shay.banon)
 */
public class ScriptableLinkedHashMap<K, V> extends LinkedHashMap<K, V> implements ScriptableMap<K, V> {
    private static final long serialVersionUID = 3774167893214964123L;

    private Scriptable parentScope;
    private Scriptable prototype;


    public ScriptableLinkedHashMap() {
    }

    public ScriptableLinkedHashMap(int initialCapacity) {
        super(initialCapacity);
    }

    public ScriptableLinkedHashMap(Map<K, V> source) {
        super(source);
    }

    /**
     * @see org.mozilla.javascript.Scriptable#getClassName()
     */
    public String getClassName() {
        return "ScriptableMap";
    }

    /**
     * @see org.mozilla.javascript.Scriptable#get(java.lang.String, org.mozilla.javascript.Scriptable)
     */
    public Object get(String name, Scriptable start) {
        // get the property from the underlying QName map
        if ("length".equals(name)) {
            return this.size();
        } else {
            return get(name);
        }
    }

    /**
     * @see org.mozilla.javascript.Scriptable#get(int, org.mozilla.javascript.Scriptable)
     */
    public Object get(int index, Scriptable start) {
        Object value = null;
        int i = 0;
        Iterator itrValues = this.values().iterator();
        while (i++ <= index && itrValues.hasNext()) {
            value = itrValues.next();
        }
        return value;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#has(java.lang.String, org.mozilla.javascript.Scriptable)
     */
    public boolean has(String name, Scriptable start) {
        // locate the property in the underlying map
        return containsKey(name);
    }

    /**
     * @see org.mozilla.javascript.Scriptable#has(int, org.mozilla.javascript.Scriptable)
     */
    public boolean has(int index, Scriptable start) {
        return (index >= 0 && this.values().size() > index);
    }

    /**
     * @see org.mozilla.javascript.Scriptable#put(java.lang.String, org.mozilla.javascript.Scriptable, java.lang.Object)
     */
    @SuppressWarnings("unchecked")
    public void put(String name, Scriptable start, Object value) {
        // add the property to the underlying QName map
        put((K) name, (V) value);
    }

    /**
     * @see org.mozilla.javascript.Scriptable#put(int, org.mozilla.javascript.Scriptable, java.lang.Object)
     */
    public void put(int index, Scriptable start, Object value) {
        // TODO: implement?
    }

    /**
     * @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
     */
    public void delete(String name) {
        // remove the property from the underlying QName map
        remove(name);
    }

    /**
     * @see org.mozilla.javascript.Scriptable#delete(int)
     */
    public void delete(int index) {
        int i = 0;
        Iterator itrKeys = this.keySet().iterator();
        while (i <= index && itrKeys.hasNext()) {
            Object key = itrKeys.next();
            if (i == index) {
                remove(key);
                break;
            }
            i++; // advance to the requested index
        }
    }

    /**
     * @see org.mozilla.javascript.Scriptable#getPrototype()
     */
    public Scriptable getPrototype() {
        return this.prototype;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#setPrototype(org.mozilla.javascript.Scriptable)
     */
    public void setPrototype(Scriptable prototype) {
        this.prototype = prototype;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#getParentScope()
     */
    public Scriptable getParentScope() {
        return this.parentScope;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#setParentScope(org.mozilla.javascript.Scriptable)
     */
    public void setParentScope(Scriptable parent) {
        this.parentScope = parent;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#getIds()
     */
    public Object[] getIds() {
        return keySet().toArray();
    }

    /**
     * @see org.mozilla.javascript.Scriptable#getDefaultValue(java.lang.Class)
     */
    public Object getDefaultValue(Class hint) {
        return null;
    }

    /**
     * @see org.mozilla.javascript.Scriptable#hasInstance(org.mozilla.javascript.Scriptable)
     */
    public boolean hasInstance(Scriptable instance) {
        return instance instanceof ScriptableLinkedHashMap;
    }
}
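
As a quick illustration of the dual key/index access the javadoc above describes, a minimal sketch (not part of this commit; class and variable names are made up):

import org.elasticsearch.script.javascript.support.ScriptableLinkedHashMap;

public class LinkedMapDemo {
    public static void main(String[] args) {
        ScriptableLinkedHashMap<String, Object> m = new ScriptableLinkedHashMap<String, Object>();
        m.put("first", "a");  // plain java.util.Map put
        m.put("second", "b");

        System.out.println(m.get("second", null)); // "b" via the Scriptable string getter
        System.out.println(m.get(1, null));        // "b" again via the integer-index getter
        System.out.println(m.get("length", null)); // 2 - the JS-style length property
    }
}
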
@@ -1,32 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript.support;

import org.mozilla.javascript.Scriptable;

import java.util.Map;

/**
 * Contract to be implemented by classes providing Map-like collections to JavaScript.
 *
 * @author kimchy (shay.banon)
 */
public interface ScriptableMap<K, V> extends Scriptable, Map<K, V> {
}
@@ -1,342 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript.support;

import org.mozilla.javascript.Scriptable;
import org.mozilla.javascript.Wrapper;

import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * Implementation of a Scriptable Map. This is the best choice where you want values to be
 * persisted directly to an underlying map supplied on construction. The class automatically
 * wraps/unwraps JS objects as they enter/leave the underlying map via the Scriptable interface
 * methods - objects are untouched if accessed via the usual Map interface methods.
 *
 * <p>Access should be by string key only - not integer index - unless you are sure the wrapped
 * map will maintain insertion order of the elements.
 *
 * @author kimchy (shay.banon)
 */
public class ScriptableWrappedMap implements ScriptableMap, Wrapper {
    private Map map;
    private Scriptable parentScope;
    private Scriptable prototype;


    /**
     * Construction
     *
     * @param scope
     * @param map
     * @return scriptable wrapped map
     */
    public static ScriptableWrappedMap wrap(Scriptable scope, Map<Object, Object> map) {
        return new ScriptableWrappedMap(scope, map);
    }

    /**
     * Construct
     *
     * @param map
     */
    public ScriptableWrappedMap(Map map) {
        this.map = map;
    }

    /**
     * Construct
     *
     * @param scope
     * @param map
     */
    public ScriptableWrappedMap(Scriptable scope, Map map) {
        this.parentScope = scope;
        this.map = map;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Wrapper#unwrap()
     */
    public Object unwrap() {
        return map;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getClassName()
     */
    public String getClassName() {
        return "ScriptableWrappedMap";
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(java.lang.String, org.mozilla.javascript.Scriptable)
     */
    public Object get(String name, Scriptable start) {
        // get the property from the underlying QName map
        if ("length".equals(name)) {
            return map.size();
        } else {
            return ScriptValueConverter.wrapValue(this.parentScope != null ? this.parentScope : start, map.get(name));
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#get(int, org.mozilla.javascript.Scriptable)
     */
    public Object get(int index, Scriptable start) {
        Object value = null;
        int i = 0;
        Iterator itrValues = map.values().iterator();
        while (i++ <= index && itrValues.hasNext()) {
            value = itrValues.next();
        }
        return ScriptValueConverter.wrapValue(this.parentScope != null ? this.parentScope : start, value);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(java.lang.String, org.mozilla.javascript.Scriptable)
     */
    public boolean has(String name, Scriptable start) {
        // locate the property in the underlying map
        return map.containsKey(name);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#has(int, org.mozilla.javascript.Scriptable)
     */
    public boolean has(int index, Scriptable start) {
        return (index >= 0 && map.values().size() > index);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(java.lang.String, org.mozilla.javascript.Scriptable, java.lang.Object)
     */
    @SuppressWarnings("unchecked")
    public void put(String name, Scriptable start, Object value) {
        map.put(name, ScriptValueConverter.unwrapValue(value));
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#put(int, org.mozilla.javascript.Scriptable, java.lang.Object)
     */
    public void put(int index, Scriptable start, Object value) {
        // TODO: implement?
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(java.lang.String)
     */
    public void delete(String name) {
        map.remove(name);
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#delete(int)
     */
    public void delete(int index) {
        int i = 0;
        Iterator itrKeys = map.keySet().iterator();
        while (i <= index && itrKeys.hasNext()) {
            Object key = itrKeys.next();
            if (i == index) {
                map.remove(key);
                break;
            }
            i++; // advance to the requested index
        }
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getPrototype()
     */
    public Scriptable getPrototype() {
        return this.prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setPrototype(org.mozilla.javascript.Scriptable)
     */
    public void setPrototype(Scriptable prototype) {
        this.prototype = prototype;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getParentScope()
     */
    public Scriptable getParentScope() {
        return this.parentScope;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#setParentScope(org.mozilla.javascript.Scriptable)
     */
    public void setParentScope(Scriptable parent) {
        this.parentScope = parent;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getIds()
     */
    public Object[] getIds() {
        return map.keySet().toArray();
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#getDefaultValue(java.lang.Class)
     */
    public Object getDefaultValue(Class hint) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.mozilla.javascript.Scriptable#hasInstance(org.mozilla.javascript.Scriptable)
     */
    public boolean hasInstance(Scriptable value) {
        if (!(value instanceof Wrapper))
            return false;
        Object instance = ((Wrapper) value).unwrap();
        return Map.class.isInstance(instance);
    }

    /* (non-Javadoc)
     * @see java.util.Map#clear()
     */
    public void clear() {
        this.map.clear();
    }

    /* (non-Javadoc)
     * @see java.util.Map#containsKey(java.lang.Object)
     */
    public boolean containsKey(Object key) {
        return this.map.containsKey(key);
    }

    /* (non-Javadoc)
     * @see java.util.Map#containsValue(java.lang.Object)
     */
    public boolean containsValue(Object value) {
        return this.map.containsValue(value);
    }

    /* (non-Javadoc)
     * @see java.util.Map#entrySet()
     */
    public Set entrySet() {
        return this.map.entrySet();
    }

    /* (non-Javadoc)
     * @see java.util.Map#get(java.lang.Object)
     */
    public Object get(Object key) {
        return this.map.get(key);
    }

    /* (non-Javadoc)
     * @see java.util.Map#isEmpty()
     */
    public boolean isEmpty() {
        return (this.map.size() == 0);
    }

    /* (non-Javadoc)
     * @see java.util.Map#keySet()
     */
    public Set keySet() {
        return this.map.keySet();
    }

    /* (non-Javadoc)
     * @see java.util.Map#put(java.lang.Object, java.lang.Object)
     */
    public Object put(Object key, Object value) {
        return this.map.put(key, value);
    }

    /* (non-Javadoc)
     * @see java.util.Map#putAll(java.util.Map)
     */
    public void putAll(Map t) {
        this.map.putAll(t);
    }

    /* (non-Javadoc)
     * @see java.util.Map#remove(java.lang.Object)
     */
    public Object remove(Object key) {
        return this.map.remove(key);
    }

    /* (non-Javadoc)
     * @see java.util.Map#size()
     */
    public int size() {
        return this.map.size();
    }

    /* (non-Javadoc)
     * @see java.util.Map#values()
     */
    public Collection values() {
        return this.map.values();
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return (this.map != null ? this.map.toString() : super.toString());
    }
}
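
To make the automatic wrap/unwrap behaviour concrete, a minimal sketch (not part of this commit; it assumes Rhino on the classpath and uses only the methods shown above):

import org.elasticsearch.script.javascript.support.ScriptableWrappedMap;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.Scriptable;

import java.util.HashMap;
import java.util.Map;

public class WrappedMapDemo {
    public static void main(String[] args) {
        Context ctx = Context.enter();
        try {
            Scriptable scope = ctx.initStandardObjects();
            Map<Object, Object> backing = new HashMap<Object, Object>();
            ScriptableWrappedMap wrapped = ScriptableWrappedMap.wrap(scope, backing);

            // a JS object stored through the Scriptable interface is unwrapped,
            // so the backing map ends up holding a plain java.util.Map
            Scriptable jsObj = ctx.newObject(scope);
            jsObj.put("x", jsObj, Integer.valueOf(1));
            wrapped.put("doc", scope, jsObj);
            System.out.println(backing.get("doc") instanceof Map); // expected: true

            // values stored through the plain Map interface pass through untouched
            wrapped.put("n", Integer.valueOf(42));
            System.out.println(backing.get("n")); // 42
        } finally {
            Context.exit();
        }
    }
}
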
@@ -1,163 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript;

import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class JavaScriptScriptEngineTests {

    private JavaScriptScriptEngineService se;

    @BeforeClass public void setup() {
        se = new JavaScriptScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
    }

    @AfterClass public void close() {
        se.close();
    }

    @Test public void testSimpleEquation() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Object o = se.execute(se.compile("1 + 2"), vars);
        assertThat(((Number) o).intValue(), equalTo(3));
    }

    @Test public void testMapAccess() {
        Map<String, Object> vars = new HashMap<String, Object>();

        Map<String, Object> obj2 = MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
        Map<String, Object> obj1 = MapBuilder.<String, Object>newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Lists.newArrayList("2", "1")).map();
        vars.put("obj1", obj1);
        Object o = se.execute(se.compile("obj1"), vars);
        assertThat(o, instanceOf(Map.class));
        obj1 = (Map<String, Object>) o;
        assertThat((String) obj1.get("prop1"), equalTo("value1"));
        assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

        o = se.execute(se.compile("obj1.l[0]"), vars);
        assertThat(((String) o), equalTo("2"));
    }

    @Test public void testJavaScriptObjectToMap() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Object o = se.execute(se.compile("var obj1 = {}; obj1.prop1 = 'value1'; obj1.obj2 = {}; obj1.obj2.prop2 = 'value2'; obj1"), vars);
        Map obj1 = (Map) o;
        assertThat((String) obj1.get("prop1"), equalTo("value1"));
        assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));
    }

    @Test public void testJavaScriptObjectMapInter() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> ctx = new HashMap<String, Object>();
        Map<String, Object> obj1 = new HashMap<String, Object>();
        obj1.put("prop1", "value1");
        ctx.put("obj1", obj1);
        vars.put("ctx", ctx);

        se.execute(se.compile("ctx.obj2 = {}; ctx.obj2.prop2 = 'value2'; ctx.obj1.prop1 = 'uvalue1'"), vars);
        ctx = (Map<String, Object>) se.unwrap(vars.get("ctx"));
        assertThat(ctx.containsKey("obj1"), equalTo(true));
        assertThat((String) ((Map<String, Object>) ctx.get("obj1")).get("prop1"), equalTo("uvalue1"));
        assertThat(ctx.containsKey("obj2"), equalTo(true));
        assertThat((String) ((Map<String, Object>) ctx.get("obj2")).get("prop2"), equalTo("value2"));
    }

    @Test public void testJavaScriptInnerArrayCreation() {
        Map<String, Object> ctx = new HashMap<String, Object>();
        Map<String, Object> doc = new HashMap<String, Object>();
        ctx.put("doc", doc);

        Object compiled = se.compile("ctx.doc.field1 = ['value1', 'value2']");
        ExecutableScript script = se.executable(compiled, new HashMap<String, Object>());
        script.setNextVar("ctx", ctx);
        script.run();

        Map<String, Object> unwrap = (Map<String, Object>) script.unwrap(ctx);

        assertThat(((Map) unwrap.get("doc")).get("field1"), instanceOf(List.class));
    }

    @Test public void testAccessListInScript() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> obj2 = MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
        Map<String, Object> obj1 = MapBuilder.<String, Object>newMapBuilder().put("prop1", "value1").put("obj2", obj2).map();
        vars.put("l", Lists.newArrayList("1", "2", "3", obj1));

        Object o = se.execute(se.compile("l.length"), vars);
        assertThat(((Number) o).intValue(), equalTo(4));

        o = se.execute(se.compile("l[0]"), vars);
        assertThat(((String) o), equalTo("1"));

        o = se.execute(se.compile("l[3]"), vars);
        obj1 = (Map<String, Object>) o;
        assertThat((String) obj1.get("prop1"), equalTo("value1"));
        assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

        o = se.execute(se.compile("l[3].prop1"), vars);
        assertThat(((String) o), equalTo("value1"));
    }

    @Test public void testChangingVarsCrossExecution1() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> ctx = new HashMap<String, Object>();
        vars.put("ctx", ctx);
        Object compiledScript = se.compile("ctx.value");

        ExecutableScript script = se.executable(compiledScript, vars);
        ctx.put("value", 1);
        Object o = script.run();
        assertThat(((Number) o).intValue(), equalTo(1));

        ctx.put("value", 2);
        o = script.run();
        assertThat(((Number) o).intValue(), equalTo(2));
    }

    @Test public void testChangingVarsCrossExecution2() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Object compiledScript = se.compile("value");

        ExecutableScript script = se.executable(compiledScript, vars);
        script.setNextVar("value", 1);
        Object o = script.run();
        assertThat(((Number) o).intValue(), equalTo(1));

        script.setNextVar("value", 2);
        o = script.run();
        assertThat(((Number) o).intValue(), equalTo(2));
    }
}
@@ -1,168 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.jsr166y.ThreadLocalRandom;
import org.elasticsearch.script.ExecutableScript;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class JavaScriptScriptMultiThreadedTest {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    @Test public void testExecutableNoRuntimeParams() throws Exception {
        final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        long x = ThreadLocalRandom.current().nextInt();
                        long y = ThreadLocalRandom.current().nextInt();
                        long addition = x + y;
                        Map<String, Object> vars = new HashMap<String, Object>();
                        vars.put("x", x);
                        vars.put("y", y);
                        ExecutableScript script = se.executable(compiled, vars);
                        for (int i = 0; i < 100000; i++) {
                            long result = ((Number) script.run()).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }


    @Test public void testExecutableWithRuntimeParams() throws Exception {
        final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        long x = ThreadLocalRandom.current().nextInt();
                        Map<String, Object> vars = new HashMap<String, Object>();
                        vars.put("x", x);
                        ExecutableScript script = se.executable(compiled, vars);
                        for (int i = 0; i < 100000; i++) {
                            long y = ThreadLocalRandom.current().nextInt();
                            long addition = x + y;
                            script.setNextVar("y", y);
                            long result = ((Number) script.run()).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }

    @Test public void testExecute() throws Exception {
        final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        Map<String, Object> runtimeVars = new HashMap<String, Object>();
                        for (int i = 0; i < 100000; i++) {
                            long x = ThreadLocalRandom.current().nextInt();
                            long y = ThreadLocalRandom.current().nextInt();
                            long addition = x + y;
                            runtimeVars.put("x", x);
                            runtimeVars.put("y", y);
                            long result = ((Number) se.execute(compiled, runtimeVars)).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }
}
@@ -1,261 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class JavaScriptScriptSearchTests {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    private Node node;

    private Client client;

    @BeforeMethod public void createNodes() throws Exception {
        node = NodeBuilder.nodeBuilder().settings(ImmutableSettings.settingsBuilder()
                .put("cluster.name", "test-cluster-" + NetworkUtils.getLocalAddress())
                .put("gateway.type", "none")
                .put("number_of_shards", 1)).node();
        client = node.client();
    }

    @AfterMethod public void closeNodes() {
        client.close();
        node.close();
    }

    @Test public void testJavaScriptFilter() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "2")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "3")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("running doc['num1'].value > 1");
        SearchResponse response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > 1").lang("js")))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "js", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(1).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("js").addParam("param1", 2)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "js", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(1L));
        assertThat(response.hits().getAt(0).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("js").addParam("param1", -1)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "js", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(3L));
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(2).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
    }

    @Test public void testScriptFieldUsingSource() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject()
                        .startObject("obj1").field("test", "something").endObject()
                        .startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
                        .endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        SearchResponse response = client.prepareSearch()
                .setQuery(matchAllQuery())
                .addField("_source.obj1") // we also automatically detect _source in fields
                .addScriptField("s_obj1", "js", "_source.obj1", null)
                .addScriptField("s_obj1_test", "js", "_source.obj1.test", null)
                .addScriptField("s_obj2", "js", "_source.obj2", null)
                .addScriptField("s_obj2_arr2", "js", "_source.obj2.arr2", null)
                .execute().actionGet();

        Map<String, Object> sObj1 = (Map<String, Object>) response.hits().getAt(0).field("_source.obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        sObj1 = (Map<String, Object>) response.hits().getAt(0).field("s_obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        Map<String, Object> sObj2 = (Map<String, Object>) response.hits().getAt(0).field("s_obj2").value();
        List sObj2Arr2 = (List) sObj2.get("arr2");
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));

        sObj2Arr2 = (List) response.hits().getAt(0).field("s_obj2_arr2").value();
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
    }

    @Test public void testCustomScriptBoost() throws Exception {
        // execute a search before we create an index
        try {
            client.prepareSearch().setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        try {
            client.prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        client.admin().indices().create(createIndexRequest("test")).actionGet();
        client.index(indexRequest("test").type("type1").id("1")
                .source(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())).actionGet();
        client.index(indexRequest("test").type("type1").id("2")
                .source(jsonBuilder().startObject().field("test", "value check").field("num1", 2.0f).endObject())).actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("--- QUERY_THEN_FETCH");

        logger.info("running doc['num1'].value");
        SearchResponse response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value").lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running -doc['num1'].value");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("-doc['num1'].value").lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));


        logger.info("running pow(doc['num1'].value, 2)");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("Math.pow(doc['num1'].value, 2)").lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running max(doc['num1'].value, 1)");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("Math.max(doc['num1'].value, 1)").lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running doc['num1'].value * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value * _score").lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running param1 * param2 * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("param1 * param2 * _score").param("param1", 2).param("param2", 2).lang("js")))
        ).actionGet();

        assertThat(response.hits().totalHits(), equalTo(2L));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
    }
}
@@ -1,71 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.javascript;

import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;

import java.util.HashMap;
import java.util.Map;

/**
 * @author kimchy (shay.banon)
 */
public class SimpleBench {

    public static void main(String[] args) {
        JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        Object compiled = se.compile("x + y");

        Map<String, Object> vars = new HashMap<String, Object>();
        // warm up
        for (int i = 0; i < 1000; i++) {
            vars.put("x", i);
            vars.put("y", i + 1);
            se.execute(compiled, vars);
        }

        final long ITER = 100000;

        StopWatch stopWatch = new StopWatch().start();
        for (long i = 0; i < ITER; i++) {
            se.execute(compiled, vars);
        }
        System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime());

        stopWatch = new StopWatch().start();
        ExecutableScript executableScript = se.executable(compiled, vars);
        for (long i = 0; i < ITER; i++) {
            executableScript.run();
        }
        System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime());

        stopWatch = new StopWatch().start();
        executableScript = se.executable(compiled, vars);
        for (long i = 0; i < ITER; i++) {
            for (Map.Entry<String, Object> entry : vars.entrySet()) {
                executableScript.setNextVar(entry.getKey(), entry.getValue());
            }
            executableScript.run();
        }
        System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime());
    }
}
@@ -1,138 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'

archivesBaseName = "elasticsearch-lang-python"

explodedDistDir = new File(distsDir, 'exploded')

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'

// add the source files to the dist jar
//jar {
//    from sourceSets.main.allSource
//}

configurations {
    dists
    distLib {
        visible = false
        transitive = false
    }
}

dependencies {
    compile project(':elasticsearch')

    compile('org.python:jython-standalone:2.5.2') { transitive = false }
    distLib('org.python:jython-standalone:2.5.2') { transitive = false }
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch files (compile above adds the elasticsearch one)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

jar {
    // from sourceSets.main.allJava
    manifest {
        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
    }
}


artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-lang-python'
            description 'Python Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured {pom ->
            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
        }
    }
}

eclipseClasspath {
    defaultOutputDir = file('build/eclipse-build')
}
@@ -1 +0,0 @@
plugin=org.elasticsearch.plugin.python.PythonPlugin
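
For context: es-plugin.properties is a standard java.util.Properties file whose single plugin key names the plugin class to load. A rough, illustrative sketch of how such a descriptor can be resolved (this is not the actual plugin-loading code in elasticsearch):

import java.io.InputStream;
import java.util.Properties;

public class PluginDescriptorDemo {
    public static void main(String[] args) throws Exception {
        ClassLoader loader = PluginDescriptorDemo.class.getClassLoader();
        Properties props = new Properties();
        InputStream in = loader.getResourceAsStream("es-plugin.properties");
        try {
            props.load(in);
        } finally {
            in.close();
        }
        // resolves to org.elasticsearch.plugin.python.PythonPlugin for the file above
        Class<?> pluginClass = loader.loadClass(props.getProperty("plugin"));
        System.out.println(pluginClass.getName());
    }
}
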
@@ -1,45 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.python;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.python.PythonScriptEngineService;

/**
 * @author kimchy (shay.banon)
 */
public class PythonPlugin extends AbstractPlugin {

    @Override public String name() {
        return "lang-python";
    }

    @Override public String description() {
        return "Python plugin allowing to add python scripting support";
    }

    @Override public void processModule(Module module) {
        if (module instanceof ScriptModule) {
            ((ScriptModule) module).addScriptEngine(PythonScriptEngineService.class);
        }
    }
}
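
A minimal sketch (not part of this commit) exercising the engine this plugin registers, mirroring the JavaScript SimpleBench above; it uses only the PythonScriptEngineService API shown in the next file:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.python.PythonScriptEngineService;

import java.util.HashMap;
import java.util.Map;

public class PythonEngineDemo {
    public static void main(String[] args) {
        PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        Object compiled = se.compile("x + y");

        Map<String, Object> vars = new HashMap<String, Object>();
        vars.put("x", 1);
        vars.put("y", 2);
        System.out.println(se.execute(compiled, vars)); // expected: 3
        se.close();
    }
}
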
@ -1,206 +0,0 @@
|
|||
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.python;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.python.core.Py;
import org.python.core.PyCode;
import org.python.core.PyObject;
import org.python.core.PyStringMap;
import org.python.util.PythonInterpreter;

import java.util.Map;

/**
 * @author kimchy (shay.banon)
 */
//TODO we can optimize the case for Map<String, Object> similar to PyStringMap
public class PythonScriptEngineService extends AbstractComponent implements ScriptEngineService {

    private final PythonInterpreter interp;

    @Inject public PythonScriptEngineService(Settings settings) {
        super(settings);

        this.interp = PythonInterpreter.threadLocalStateInterpreter(null);
    }

    @Override public String[] types() {
        return new String[]{"python", "py"};
    }

    @Override public String[] extensions() {
        return new String[]{"py"};
    }

    @Override public Object compile(String script) {
        return interp.compile(script);
    }

    @Override public ExecutableScript executable(Object compiledScript, Map<String, Object> vars) {
        return new PythonExecutableScript((PyCode) compiledScript, vars);
    }

    @Override public SearchScript search(Object compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) {
        return new PythonSearchScript((PyCode) compiledScript, vars, lookup);
    }

    @Override public Object execute(Object compiledScript, Map<String, Object> vars) {
        PyObject pyVars = Py.java2py(vars);
        interp.setLocals(pyVars);
        PyObject ret = interp.eval((PyCode) compiledScript);
        if (ret == null) {
            return null;
        }
        return ret.__tojava__(Object.class);
    }

    @Override public Object unwrap(Object value) {
        return unwrapValue(value);
    }

    @Override public void close() {
        interp.cleanup();
    }

    public class PythonExecutableScript implements ExecutableScript {

        private final PyCode code;

        private final PyStringMap pyVars;

        public PythonExecutableScript(PyCode code, Map<String, Object> vars) {
            this.code = code;
            this.pyVars = new PyStringMap();
            for (Map.Entry<String, Object> entry : vars.entrySet()) {
                pyVars.__setitem__(entry.getKey(), Py.java2py(entry.getValue()));
            }
        }

        @Override public void setNextVar(String name, Object value) {
            pyVars.__setitem__(name, Py.java2py(value));
        }

        @Override public Object run() {
            interp.setLocals(pyVars);
            PyObject ret = interp.eval(code);
            if (ret == null) {
                return null;
            }
            return ret.__tojava__(Object.class);
        }

        @Override public Object unwrap(Object value) {
            return unwrapValue(value);
        }
    }

    public class PythonSearchScript implements SearchScript {

        private final PyCode code;

        private final PyStringMap pyVars;

        private final SearchLookup lookup;

        public PythonSearchScript(PyCode code, Map<String, Object> vars, SearchLookup lookup) {
            this.code = code;
            this.pyVars = new PyStringMap();
            for (Map.Entry<String, Object> entry : lookup.asMap().entrySet()) {
                pyVars.__setitem__(entry.getKey(), Py.java2py(entry.getValue()));
            }
            if (vars != null) {
                for (Map.Entry<String, Object> entry : vars.entrySet()) {
                    pyVars.__setitem__(entry.getKey(), Py.java2py(entry.getValue()));
                }
            }
            this.lookup = lookup;
        }

        @Override public void setScorer(Scorer scorer) {
            lookup.setScorer(scorer);
        }

        @Override public void setNextReader(IndexReader reader) {
            lookup.setNextReader(reader);
        }

        @Override public void setNextDocId(int doc) {
            lookup.setNextDocId(doc);
        }

        @Override public void setNextSource(Map<String, Object> source) {
            lookup.source().setNextSource(source);
        }

        @Override public void setNextScore(float score) {
            pyVars.__setitem__("_score", Py.java2py(score));
        }

        @Override public void setNextVar(String name, Object value) {
            pyVars.__setitem__(name, Py.java2py(value));
        }

        @Override public Object run() {
            interp.setLocals(pyVars);
            PyObject ret = interp.eval(code);
            if (ret == null) {
                return null;
            }
            return ret.__tojava__(Object.class);
        }

        @Override public float runAsFloat() {
            return ((Number) run()).floatValue();
        }

        @Override public long runAsLong() {
            return ((Number) run()).longValue();
        }

        @Override public double runAsDouble() {
            return ((Number) run()).doubleValue();
        }

        @Override public Object unwrap(Object value) {
            return unwrapValue(value);
        }
    }

    public static Object unwrapValue(Object value) {
        if (value == null) {
            return null;
        } else if (value instanceof PyObject) {
            // seems like this is enough, inner PyDictionary will do the conversion for us for example, so expose it directly
            return ((PyObject) value).__tojava__(Object.class);
        }
        return value;
    }
}
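For orientation, here is a minimal sketch of driving the service directly. Every call is one the deleted tests and bench below also make; only the wrapper class and main method are invented for illustration:

import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.python.PythonScriptEngineService;

import java.util.HashMap;
import java.util.Map;

public class PythonEngineSketch {
    public static void main(String[] args) {
        PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        Object compiled = se.compile("x + y"); // compiles to a Jython PyCode

        Map<String, Object> vars = new HashMap<String, Object>();
        vars.put("x", 1);
        vars.put("y", 2);
        ExecutableScript script = se.executable(compiled, vars);
        System.out.println(se.unwrap(script.run())); // prints 3

        se.close(); // cleans up the thread-local interpreter
    }
}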
@@ -1,140 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.python;

import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class PythonScriptEngineTests {

    private PythonScriptEngineService se;

    @BeforeClass public void setup() {
        se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
    }

    @AfterClass public void close() {
        se.close();
    }

    @Test public void testSimpleEquation() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Object o = se.execute(se.compile("1 + 2"), vars);
        assertThat(((Number) o).intValue(), equalTo(3));
    }

    @Test public void testMapAccess() {
        Map<String, Object> vars = new HashMap<String, Object>();

        Map<String, Object> obj2 = MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
        Map<String, Object> obj1 = MapBuilder.<String, Object>newMapBuilder().put("prop1", "value1").put("obj2", obj2).put("l", Lists.newArrayList("2", "1")).map();
        vars.put("obj1", obj1);
        Object o = se.execute(se.compile("obj1"), vars);
        assertThat(o, instanceOf(Map.class));
        obj1 = (Map<String, Object>) o;
        assertThat((String) obj1.get("prop1"), equalTo("value1"));
        assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

        o = se.execute(se.compile("obj1['l'][0]"), vars);
        assertThat(((String) o), equalTo("2"));
    }

    @Test public void testObjectMapInter() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> ctx = new HashMap<String, Object>();
        Map<String, Object> obj1 = new HashMap<String, Object>();
        obj1.put("prop1", "value1");
        ctx.put("obj1", obj1);
        vars.put("ctx", ctx);

        se.execute(se.compile("ctx['obj2'] = { 'prop2' : 'value2' }; ctx['obj1']['prop1'] = 'uvalue1'"), vars);
        ctx = (Map<String, Object>) se.unwrap(vars.get("ctx"));
        assertThat(ctx.containsKey("obj1"), equalTo(true));
        assertThat((String) ((Map<String, Object>) ctx.get("obj1")).get("prop1"), equalTo("uvalue1"));
        assertThat(ctx.containsKey("obj2"), equalTo(true));
        assertThat((String) ((Map<String, Object>) ctx.get("obj2")).get("prop2"), equalTo("value2"));
    }

    @Test public void testAccessListInScript() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> obj2 = MapBuilder.<String, Object>newMapBuilder().put("prop2", "value2").map();
        Map<String, Object> obj1 = MapBuilder.<String, Object>newMapBuilder().put("prop1", "value1").put("obj2", obj2).map();
        vars.put("l", Lists.newArrayList("1", "2", "3", obj1));

//        Object o = se.execute(se.compile("l.length"), vars);
//        assertThat(((Number) o).intValue(), equalTo(4));

        Object o = se.execute(se.compile("l[0]"), vars);
        assertThat(((String) o), equalTo("1"));

        o = se.execute(se.compile("l[3]"), vars);
        obj1 = (Map<String, Object>) o;
        assertThat((String) obj1.get("prop1"), equalTo("value1"));
        assertThat((String) ((Map<String, Object>) obj1.get("obj2")).get("prop2"), equalTo("value2"));

        o = se.execute(se.compile("l[3]['prop1']"), vars);
        assertThat(((String) o), equalTo("value1"));
    }

    @Test public void testChangingVarsCrossExecution1() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> ctx = new HashMap<String, Object>();
        vars.put("ctx", ctx);
        Object compiledScript = se.compile("ctx['value']");

        ExecutableScript script = se.executable(compiledScript, vars);
        ctx.put("value", 1);
        Object o = script.run();
        assertThat(((Number) o).intValue(), equalTo(1));

        ctx.put("value", 2);
        o = script.run();
        assertThat(((Number) o).intValue(), equalTo(2));
    }

    @Test public void testChangingVarsCrossExecution2() {
        Map<String, Object> vars = new HashMap<String, Object>();
        Map<String, Object> ctx = new HashMap<String, Object>();
        Object compiledScript = se.compile("value");

        ExecutableScript script = se.executable(compiledScript, vars);
        script.setNextVar("value", 1);
        Object o = script.run();
        assertThat(((Number) o).intValue(), equalTo(1));

        script.setNextVar("value", 2);
        o = script.run();
        assertThat(((Number) o).intValue(), equalTo(2));
    }
}
@@ -1,169 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.python;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.util.concurrent.jsr166y.ThreadLocalRandom;
import org.elasticsearch.script.ExecutableScript;
import org.testng.annotations.Test;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class PythonScriptMultiThreadedTest {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    @Test public void testExecutableNoRuntimeParams() throws Exception {
        final PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        long x = ThreadLocalRandom.current().nextInt();
                        long y = ThreadLocalRandom.current().nextInt();
                        long addition = x + y;
                        Map<String, Object> vars = new HashMap<String, Object>();
                        vars.put("x", x);
                        vars.put("y", y);
                        ExecutableScript script = se.executable(compiled, vars);
                        for (int i = 0; i < 100000; i++) {
                            long result = ((Number) script.run()).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }


//    @Test public void testExecutableWithRuntimeParams() throws Exception {
//        final PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
//        final Object compiled = se.compile("x + y");
//        final AtomicBoolean failed = new AtomicBoolean();
//
//        Thread[] threads = new Thread[50];
//        final CountDownLatch latch = new CountDownLatch(threads.length);
//        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
//        for (int i = 0; i < threads.length; i++) {
//            threads[i] = new Thread(new Runnable() {
//                @Override public void run() {
//                    try {
//                        barrier.await();
//                        long x = ThreadLocalRandom.current().nextInt();
//                        Map<String, Object> vars = new HashMap<String, Object>();
//                        vars.put("x", x);
//                        ExecutableScript script = se.executable(compiled, vars);
//                        Map<String, Object> runtimeVars = new HashMap<String, Object>();
//                        for (int i = 0; i < 100000; i++) {
//                            long y = ThreadLocalRandom.current().nextInt();
//                            long addition = x + y;
//                            runtimeVars.put("y", y);
//                            long result = ((Number) script.run(runtimeVars)).longValue();
//                            assertThat(result, equalTo(addition));
//                        }
//                    } catch (Throwable t) {
//                        failed.set(true);
//                        logger.error("failed", t);
//                    } finally {
//                        latch.countDown();
//                    }
//                }
//            });
//        }
//        for (int i = 0; i < threads.length; i++) {
//            threads[i].start();
//        }
//        barrier.await();
//        latch.await();
//        assertThat(failed.get(), equalTo(false));
//    }

    @Test public void testExecute() throws Exception {
        final PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        final Object compiled = se.compile("x + y");
        final AtomicBoolean failed = new AtomicBoolean();

        Thread[] threads = new Thread[50];
        final CountDownLatch latch = new CountDownLatch(threads.length);
        final CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new Thread(new Runnable() {
                @Override public void run() {
                    try {
                        barrier.await();
                        Map<String, Object> runtimeVars = new HashMap<String, Object>();
                        for (int i = 0; i < 100000; i++) {
                            long x = ThreadLocalRandom.current().nextInt();
                            long y = ThreadLocalRandom.current().nextInt();
                            long addition = x + y;
                            runtimeVars.put("x", x);
                            runtimeVars.put("y", y);
                            long result = ((Number) se.execute(compiled, runtimeVars)).longValue();
                            assertThat(result, equalTo(addition));
                        }
                    } catch (Throwable t) {
                        failed.set(true);
                        logger.error("failed", t);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].start();
        }
        barrier.await();
        latch.await();
        assertThat(failed.get(), equalTo(false));
    }
}
@@ -1,246 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.python;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.search.builder.SearchSourceBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
public class PythonScriptSearchTests {

    protected final ESLogger logger = Loggers.getLogger(getClass());

    private Node node;

    private Client client;

    @BeforeMethod public void createNodes() throws Exception {
        node = NodeBuilder.nodeBuilder().settings(ImmutableSettings.settingsBuilder()
                .put("cluster.name", "test-cluster-" + NetworkUtils.getLocalAddress())
                .put("gateway.type", "none")
                .put("number_of_shards", 1)).node();
        client = node.client();
    }

    @AfterMethod public void closeNodes() {
        client.close();
        node.close();
    }

    @Test public void testPythonFilter() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "2")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 2.0f).endObject())
                .execute().actionGet();
        client.admin().indices().prepareFlush().execute().actionGet();
        client.prepareIndex("test", "type1", "3")
                .setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 3.0f).endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("running doc['num1'].value > 1");
        SearchResponse response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > 1").lang("python")))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "python", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(2l));
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(1).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("python").addParam("param1", 2)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "python", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(1l));
        assertThat(response.hits().getAt(0).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(3.0));

        logger.info("running doc['num1'].value > param1");
        response = client.prepareSearch()
                .setQuery(filteredQuery(matchAllQuery(), scriptFilter("doc['num1'].value > param1").lang("python").addParam("param1", -1)))
                .addSort("num1", SortOrder.ASC)
                .addScriptField("sNum1", "python", "doc['num1'].value", null)
                .execute().actionGet();

        assertThat(response.hits().totalHits(), equalTo(3l));
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));
        assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
        assertThat(response.hits().getAt(2).id(), equalTo("3"));
        assertThat((Double) response.hits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
    }

    @Test public void testScriptFieldUsingSource() throws Exception {
        client.admin().indices().prepareCreate("test").execute().actionGet();
        client.prepareIndex("test", "type1", "1")
                .setSource(jsonBuilder().startObject()
                        .startObject("obj1").field("test", "something").endObject()
                        .startObject("obj2").startArray("arr2").value("arr_value1").value("arr_value2").endArray().endObject()
                        .endObject())
                .execute().actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        SearchResponse response = client.prepareSearch()
                .setQuery(matchAllQuery())
                .addField("_source.obj1") // we also automatically detect _source in fields
                .addScriptField("s_obj1", "python", "_source['obj1']", null)
                .addScriptField("s_obj1_test", "python", "_source['obj1']['test']", null)
                .addScriptField("s_obj2", "python", "_source['obj2']", null)
                .addScriptField("s_obj2_arr2", "python", "_source['obj2']['arr2']", null)
                .execute().actionGet();

        Map<String, Object> sObj1 = (Map<String, Object>) response.hits().getAt(0).field("_source.obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        sObj1 = (Map<String, Object>) response.hits().getAt(0).field("s_obj1").value();
        assertThat(sObj1.get("test").toString(), equalTo("something"));
        assertThat(response.hits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something"));

        Map<String, Object> sObj2 = (Map<String, Object>) response.hits().getAt(0).field("s_obj2").value();
        List sObj2Arr2 = (List) sObj2.get("arr2");
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));

        sObj2Arr2 = (List) response.hits().getAt(0).field("s_obj2_arr2").value();
        assertThat(sObj2Arr2.size(), equalTo(2));
        assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1"));
        assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2"));
    }

    @Test public void testCustomScriptBoost() throws Exception {
        // execute a search before we create an index
        try {
            client.prepareSearch().setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        try {
            client.prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
            assert false : "should fail";
        } catch (Exception e) {
            // ignore, no indices
        }

        client.admin().indices().create(createIndexRequest("test")).actionGet();
        client.index(indexRequest("test").type("type1").id("1")
                .source(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).endObject())).actionGet();
        client.index(indexRequest("test").type("type1").id("2")
                .source(jsonBuilder().startObject().field("test", "value check").field("num1", 2.0f).endObject())).actionGet();
        client.admin().indices().refresh(refreshRequest()).actionGet();

        logger.info("--- QUERY_THEN_FETCH");

        logger.info("running doc['num1'].value");
        SearchResponse response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value").lang("python")))
        ).actionGet();

        assertThat("Failures " + Arrays.toString(response.shardFailures()), response.shardFailures().length, equalTo(0));

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running -doc['num1'].value");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("-doc['num1'].value").lang("python")))
        ).actionGet();

        assertThat("Failures " + Arrays.toString(response.shardFailures()), response.shardFailures().length, equalTo(0));

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("1"));
        assertThat(response.hits().getAt(1).id(), equalTo("2"));


        logger.info("running doc['num1'].value * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("doc['num1'].value * _score").lang("python")))
        ).actionGet();

        assertThat("Failures " + Arrays.toString(response.shardFailures()), response.shardFailures().length, equalTo(0));

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
        assertThat(response.hits().getAt(0).id(), equalTo("2"));
        assertThat(response.hits().getAt(1).id(), equalTo("1"));

        logger.info("running param1 * param2 * _score");
        response = client.search(searchRequest()
                .searchType(SearchType.QUERY_THEN_FETCH)
                .source(searchSource().explain(true).query(customScoreQuery(termQuery("test", "value")).script("param1 * param2 * _score").param("param1", 2).param("param2", 2).lang("python")))
        ).actionGet();

        assertThat("Failures " + Arrays.toString(response.shardFailures()), response.shardFailures().length, equalTo(0));

        assertThat(response.hits().totalHits(), equalTo(2l));
        logger.info("Hit[0] {} Explanation {}", response.hits().getAt(0).id(), response.hits().getAt(0).explanation());
        logger.info("Hit[1] {} Explanation {}", response.hits().getAt(1).id(), response.hits().getAt(1).explanation());
    }
}
@@ -1,71 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.python;

import org.elasticsearch.common.StopWatch;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.script.ExecutableScript;

import java.util.HashMap;
import java.util.Map;

/**
 * @author kimchy (shay.banon)
 */
public class SimpleBench {

    public static void main(String[] args) {
        PythonScriptEngineService se = new PythonScriptEngineService(ImmutableSettings.Builder.EMPTY_SETTINGS);
        Object compiled = se.compile("x + y");

        Map<String, Object> vars = new HashMap<String, Object>();
        // warm up
        for (int i = 0; i < 1000; i++) {
            vars.put("x", i);
            vars.put("y", i + 1);
            se.execute(compiled, vars);
        }

        final long ITER = 100000;

        StopWatch stopWatch = new StopWatch().start();
        for (long i = 0; i < ITER; i++) {
            se.execute(compiled, vars);
        }
        System.out.println("Execute Took: " + stopWatch.stop().lastTaskTime());

        stopWatch = new StopWatch().start();
        ExecutableScript executableScript = se.executable(compiled, vars);
        for (long i = 0; i < ITER; i++) {
            executableScript.run();
        }
        System.out.println("Executable Took: " + stopWatch.stop().lastTaskTime());

        stopWatch = new StopWatch().start();
        executableScript = se.executable(compiled, vars);
        for (long i = 0; i < ITER; i++) {
            for (Map.Entry<String, Object> entry : vars.entrySet()) {
                executableScript.setNextVar(entry.getKey(), entry.getValue());
            }
            executableScript.run();
        }
        System.out.println("Executable (vars) Took: " + stopWatch.stop().lastTaskTime());
    }
}
@@ -1,132 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'

archivesBaseName = "elasticsearch-mapper-attachments"

explodedDistDir = new File(distsDir, 'exploded')

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need to use the resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'

jar {
    // from sourceSets.main.allJava
    manifest {
        attributes("Implementation-Title": "ElasticSearch", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)
    }
}

configurations {
    dists
    distLib {
        visible = false
        transitive = false
    }
}

dependencies {
    compile project(':elasticsearch')

    compile('org.apache.tika:tika-app:0.10') { transitive = false }
    distLib('org.apache.tika:tika-app:0.10') { transitive = false }
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch files (compile above adds the elasticsearch one)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-mapper-attachments'
            description 'Attachments Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured {pom ->
            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // removes the test scoped ones
        }
    }
}

eclipseClasspath {
    defaultOutputDir = file('build/eclipse-build')
}
@@ -1 +0,0 @@
plugin=org.elasticsearch.plugin.mapper.attachments.MapperAttachmentsPlugin
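This one-line descriptor is how the node discovers a plugin jar on the classpath. A hedged, simplified sketch of the lookup — the real loader scans every es-plugin.properties visible to the classloader, and the sketch class and method are invented; only the property key "plugin" and the class name in the comment come from the file above:

import java.io.InputStream;
import java.util.Properties;

public class PluginDescriptorSketch {
    public static Object loadPlugin(ClassLoader loader) throws Exception {
        InputStream is = loader.getResourceAsStream("es-plugin.properties");
        if (is == null) {
            throw new IllegalStateException("no es-plugin.properties on classpath");
        }
        Properties props = new Properties();
        try {
            props.load(is);
        } finally {
            is.close();
        }
        // e.g. org.elasticsearch.plugin.mapper.attachments.MapperAttachmentsPlugin
        String className = props.getProperty("plugin");
        return loader.loadClass(className).newInstance();
    }
}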
@@ -1,341 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.attachment;

import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.elasticsearch.common.io.FastByteArrayInputStream;
import org.elasticsearch.common.netty.util.internal.SystemPropertyUtil;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
import static org.elasticsearch.plugin.mapper.attachments.tika.TikaInstance.*;

/**
 * <pre>
 * field1 : "..."
 * </pre>
 * <p>Or:
 * <pre>
 * {
 *     file1 : {
 *         _content_type : "application/pdf",
 *         _name : "..../something.pdf",
 *         content : ""
 *     }
 * }
 * </pre>
 *
 * @author kimchy (shay.banon)
 */
public class AttachmentMapper implements Mapper {

    public static final String CONTENT_TYPE = "attachment";

    public static class Defaults {
        public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
    }

    public static class Builder extends Mapper.Builder<Builder, AttachmentMapper> {

        private ContentPath.Type pathType = Defaults.PATH_TYPE;

        private StringFieldMapper.Builder contentBuilder;

        private StringFieldMapper.Builder titleBuilder = stringField("title");

        private StringFieldMapper.Builder authorBuilder = stringField("author");

        private StringFieldMapper.Builder keywordsBuilder = stringField("keywords");

        private DateFieldMapper.Builder dateBuilder = dateField("date");

        private StringFieldMapper.Builder contentTypeBuilder = stringField("content_type");

        public Builder(String name) {
            super(name);
            this.builder = this;
            this.contentBuilder = stringField(name);
        }

        public Builder pathType(ContentPath.Type pathType) {
            this.pathType = pathType;
            return this;
        }

        public Builder content(StringFieldMapper.Builder content) {
            this.contentBuilder = content;
            return this;
        }

        public Builder date(DateFieldMapper.Builder date) {
            this.dateBuilder = date;
            return this;
        }

        public Builder author(StringFieldMapper.Builder author) {
            this.authorBuilder = author;
            return this;
        }

        public Builder title(StringFieldMapper.Builder title) {
            this.titleBuilder = title;
            return this;
        }

        public Builder keywords(StringFieldMapper.Builder keywords) {
            this.keywordsBuilder = keywords;
            return this;
        }

        public Builder contentType(StringFieldMapper.Builder contentType) {
            this.contentTypeBuilder = contentType;
            return this;
        }

        @Override public AttachmentMapper build(BuilderContext context) {
            ContentPath.Type origPathType = context.path().pathType();
            context.path().pathType(pathType);

            // create the content mapper under the actual name
            StringFieldMapper contentMapper = contentBuilder.build(context);

            // create the DC one under the name
            context.path().add(name);
            DateFieldMapper dateMapper = dateBuilder.build(context);
            StringFieldMapper authorMapper = authorBuilder.build(context);
            StringFieldMapper titleMapper = titleBuilder.build(context);
            StringFieldMapper keywordsMapper = keywordsBuilder.build(context);
            StringFieldMapper contentTypeMapper = contentTypeBuilder.build(context);
            context.path().remove();

            context.path().pathType(origPathType);

            return new AttachmentMapper(name, pathType, contentMapper, dateMapper, titleMapper, authorMapper, keywordsMapper, contentTypeMapper);
        }
    }

    /**
     * <pre>
     * field1 : { type : "attachment" }
     * </pre>
     * Or:
     * <pre>
     * field1 : {
     *     type : "attachment",
     *     fields : {
     *         field1 : {type : "binary"},
     *         title : {store : "yes"},
     *         date : {store : "yes"}
     *     }
     * }
     * </pre>
     *
     * @author kimchy (shay.banon)
     */
    public static class TypeParser implements Mapper.TypeParser {

        @SuppressWarnings({"unchecked"}) @Override public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            AttachmentMapper.Builder builder = new AttachmentMapper.Builder(name);

            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("path")) {
                    builder.pathType(parsePathType(name, fieldNode.toString()));
                } else if (fieldName.equals("fields")) {
                    Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
                    for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
                        String propName = entry1.getKey();
                        Object propNode = entry1.getValue();

                        if (name.equals(propName)) {
                            // that is the content
                            builder.content((StringFieldMapper.Builder) parserContext.typeParser("string").parse(name, (Map<String, Object>) propNode, parserContext));
                        } else if ("date".equals(propName)) {
                            builder.date((DateFieldMapper.Builder) parserContext.typeParser("date").parse("date", (Map<String, Object>) propNode, parserContext));
                        } else if ("title".equals(propName)) {
                            builder.title((StringFieldMapper.Builder) parserContext.typeParser("string").parse("title", (Map<String, Object>) propNode, parserContext));
                        } else if ("author".equals(propName)) {
                            builder.author((StringFieldMapper.Builder) parserContext.typeParser("string").parse("author", (Map<String, Object>) propNode, parserContext));
                        } else if ("keywords".equals(propName)) {
                            builder.keywords((StringFieldMapper.Builder) parserContext.typeParser("string").parse("keywords", (Map<String, Object>) propNode, parserContext));
                        } else if ("content_type".equals(propName)) {
                            builder.contentType((StringFieldMapper.Builder) parserContext.typeParser("string").parse("content_type", (Map<String, Object>) propNode, parserContext));
                        }
                    }
                }
            }

            return builder;
        }
    }

    private final String name;

    private final ContentPath.Type pathType;

    private final StringFieldMapper contentMapper;

    private final DateFieldMapper dateMapper;

    private final StringFieldMapper authorMapper;

    private final StringFieldMapper titleMapper;

    private final StringFieldMapper keywordsMapper;

    private final StringFieldMapper contentTypeMapper;

    public AttachmentMapper(String name, ContentPath.Type pathType, StringFieldMapper contentMapper,
                            DateFieldMapper dateMapper, StringFieldMapper titleMapper, StringFieldMapper authorMapper,
                            StringFieldMapper keywordsMapper, StringFieldMapper contentTypeMapper) {
        this.name = name;
        this.pathType = pathType;
        this.contentMapper = contentMapper;
        this.dateMapper = dateMapper;
        this.titleMapper = titleMapper;
        this.authorMapper = authorMapper;
        this.keywordsMapper = keywordsMapper;
        this.contentTypeMapper = contentTypeMapper;
    }

    @Override public String name() {
        return name;
    }

    @Override public void parse(ParseContext context) throws IOException {
        byte[] content = null;
        String contentType = null;
        String name = null;

        XContentParser parser = context.parser();
        XContentParser.Token token = parser.currentToken();
        if (token == XContentParser.Token.VALUE_STRING) {
            content = parser.binaryValue();
        } else {
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    if ("content".equals(currentFieldName)) {
                        content = parser.binaryValue();
                    } else if ("_content_type".equals(currentFieldName)) {
                        contentType = parser.text();
                    } else if ("_name".equals(currentFieldName)) {
                        name = parser.text();
                    }
                }
            }
        }

        Metadata metadata = new Metadata();
        if (contentType != null) {
            metadata.add(Metadata.CONTENT_TYPE, contentType);
        }
        if (name != null) {
            metadata.add(Metadata.RESOURCE_NAME_KEY, name);
        }

        String parsedContent;
        try {
            parsedContent = tika().parseToString(new FastByteArrayInputStream(content), metadata);
        } catch (TikaException e) {
            throw new MapperParsingException("Failed to extract text for [" + name + "]", e);
        }

        context.externalValue(parsedContent);
        contentMapper.parse(context);

        context.externalValue(metadata.get(Metadata.DATE));
        dateMapper.parse(context);

        context.externalValue(metadata.get(Metadata.TITLE));
        titleMapper.parse(context);

        context.externalValue(metadata.get(Metadata.AUTHOR));
        authorMapper.parse(context);

        context.externalValue(metadata.get(Metadata.KEYWORDS));
        keywordsMapper.parse(context);

        context.externalValue(metadata.get(Metadata.CONTENT_TYPE));
        contentTypeMapper.parse(context);
    }

    @Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        // ignore this for now
    }

    @Override public void traverse(FieldMapperListener fieldMapperListener) {
        contentMapper.traverse(fieldMapperListener);
        dateMapper.traverse(fieldMapperListener);
        titleMapper.traverse(fieldMapperListener);
        authorMapper.traverse(fieldMapperListener);
        keywordsMapper.traverse(fieldMapperListener);
        contentTypeMapper.traverse(fieldMapperListener);
    }

    @Override public void traverse(ObjectMapperListener objectMapperListener) {
    }

    @Override public void close() {
        contentMapper.close();
        dateMapper.close();
        titleMapper.close();
        authorMapper.close();
        keywordsMapper.close();
        contentTypeMapper.close();
    }

    @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name);
        builder.field("type", CONTENT_TYPE);
        builder.field("path", pathType.name().toLowerCase());

        builder.startObject("fields");
        contentMapper.toXContent(builder, params);
        authorMapper.toXContent(builder, params);
        titleMapper.toXContent(builder, params);
        dateMapper.toXContent(builder, params);
        keywordsMapper.toXContent(builder, params);
        contentTypeMapper.toXContent(builder, params);
        builder.endObject();

        builder.endObject();
        return builder;
    }
}
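To make the parse flow above concrete, here is a hedged sketch of the two document shapes the mapper accepts. The field names "content", "_content_type", and "_name" come from the parse method above; the "file" field name and the sample bytes are illustrative only:

import org.elasticsearch.common.xcontent.XContentBuilder;

import static org.elasticsearch.common.xcontent.XContentFactory.*;

public class AttachmentDocSketch {
    public static void main(String[] args) throws Exception {
        byte[] content = "hello attachment".getBytes();

        // Shape 1: the bare field value is treated as the (base64) content itself.
        XContentBuilder simple = jsonBuilder().startObject()
                .field("file", content)
                .endObject();

        // Shape 2: an object carrying the content plus hints that feed Tika metadata.
        XContentBuilder detailed = jsonBuilder().startObject()
                .startObject("file")
                        .field("_content_type", "text/plain")
                        .field("_name", "hello.txt")
                        .field("content", content)
                .endObject()
                .endObject();

        System.out.println(simple.string());
        System.out.println(detailed.string());
    }
}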
@@ -1,39 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.attachment;

import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;

/**
 * @author kimchy (shay.banon)
 */
public class RegisterAttachmentType extends AbstractIndexComponent {

    @Inject public RegisterAttachmentType(Index index, @IndexSettings Settings indexSettings, MapperService mapperService) {
        super(index, indexSettings);

        mapperService.documentMapperParser().putTypeParser("attachment", new AttachmentMapper.TypeParser());
    }
}
@@ -1,33 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.mapper.attachments;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.mapper.attachment.RegisterAttachmentType;

/**
 * @author kimchy (shay.banon)
 */
public class AttachmentsIndexModule extends AbstractModule {

    @Override protected void configure() {
        bind(RegisterAttachmentType.class).asEagerSingleton();
    }
}
@@ -1,47 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.mapper.attachments;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;

import java.util.Collection;

import static org.elasticsearch.common.collect.Lists.*;

/**
 * @author kimchy (shay.banon)
 */
public class MapperAttachmentsPlugin extends AbstractPlugin {

    @Override public String name() {
        return "mapper-attachments";
    }

    @Override public String description() {
        return "Adds the attachment type allowing to parse different attachment formats";
    }

    @Override public Collection<Class<? extends Module>> indexModules() {
        Collection<Class<? extends Module>> modules = newArrayList();
        modules.add(AttachmentsIndexModule.class);
        return modules;
    }
}
@@ -1,34 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.mapper.attachments.tika;

import org.apache.tika.Tika;

/**
 * @author kimchy (shay.banon)
 */
public class TikaInstance {

    private static final Tika tika = new Tika();

    public static Tika tika() {
        return tika;
    }
}
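For reference, the mapper consumes this shared instance exactly as in its parse method; a hedged standalone sketch, feeding a plain ByteArrayInputStream instead of the ES FastByteArrayInputStream (the wrapper class and sample bytes are invented):

import org.apache.tika.metadata.Metadata;

import java.io.ByteArrayInputStream;

import static org.elasticsearch.plugin.mapper.attachments.tika.TikaInstance.*;

public class TikaSketch {
    public static void main(String[] args) throws Exception {
        byte[] bytes = "<html><body>hello</body></html>".getBytes();
        Metadata metadata = new Metadata();
        metadata.add(Metadata.CONTENT_TYPE, "text/html"); // optional hint, as in the mapper
        String text = tika().parseToString(new ByteArrayInputStream(bytes), metadata);
        System.out.println(text.trim());                  // extracted text: "hello"
        System.out.println(metadata.get(Metadata.CONTENT_TYPE));
    }
}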
@@ -1,74 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.xcontent;

import org.apache.lucene.document.Document;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.attachment.AttachmentMapper;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import static org.elasticsearch.common.io.Streams.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class SimpleAttachmentMapperTests {

    private DocumentMapperParser mapperParser;

    @BeforeClass public void setupMapperParser() {
        mapperParser = new DocumentMapperParser(new Index("test"), new AnalysisService(new Index("test")));
        mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
    }

    @Test public void testSimpleMappings() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/test-mapping.json");
        DocumentMapper docMapper = mapperParser.parse(mapping);
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/testXHTML.html");

        byte[] json = jsonBuilder().startObject().field("_id", 1).field("file", html).endObject().copiedBytes();

        Document doc = docMapper.parse(json).rootDoc();

        assertThat(doc.get(docMapper.mappers().smartName("file.content_type").mapper().names().indexName()), equalTo("application/xhtml+xml"));
        assertThat(doc.get(docMapper.mappers().smartName("file.title").mapper().names().indexName()), equalTo("XHTML test document"));
        assertThat(doc.get(docMapper.mappers().smartName("file").mapper().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content"));

        // re-parse with the mapping the mapper itself generated, to verify it round-trips
        String builtMapping = docMapper.mappingSource().string();
        docMapper = mapperParser.parse(builtMapping);

        json = jsonBuilder().startObject().field("_id", 1).field("file", html).endObject().copiedBytes();

        doc = docMapper.parse(json).rootDoc();

        assertThat(doc.get(docMapper.mappers().smartName("file.content_type").mapper().names().indexName()), equalTo("application/xhtml+xml"));
        assertThat(doc.get(docMapper.mappers().smartName("file.title").mapper().names().indexName()), equalTo("XHTML test document"));
        assertThat(doc.get(docMapper.mappers().smartName("file").mapper().names().indexName()), containsString("This document tests the ability of Apache Tika to extract content"));
    }
}
@@ -1,7 +0,0 @@
{
    person : {
        properties : {
            "file" : { type : "attachment" }
        }
    }
}
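
The mapping above is what the tests load from the classpath; the same structure could also be produced with the jsonBuilder() helper those tests already use. A sketch under that assumption (MappingBuilderSketch is a hypothetical class; copiedBytes() is the accessor shown in the tests in this diff):

import org.elasticsearch.common.xcontent.XContentBuilder;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class MappingBuilderSketch {
    // Builds the equivalent of test-mapping.json: a "person" type whose
    // "file" property is handled by the attachment mapper.
    public static byte[] buildMapping() throws Exception {
        XContentBuilder builder = jsonBuilder().startObject()
                .startObject("person")
                    .startObject("properties")
                        .startObject("file").field("type", "attachment").endObject()
                    .endObject()
                .endObject()
                .endObject();
        return builder.copiedBytes();
    }
}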
@@ -1,29 +0,0 @@
<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
    <title>XHTML test document</title>
    <meta name="Author" content="Tika Developers"/>
    <meta http-equiv="refresh" content="5"/>
</head>
<body>
<p>
    This document tests the ability of Apache Tika to extract content
    from an <a href="http://www.w3.org/TR/xhtml1/">XHTML document</a>.
</p>
</body>
</html>
@@ -1,95 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugin.mapper.attachments.test;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.node.Node;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.common.io.Streams.*;
import static org.elasticsearch.common.settings.ImmutableSettings.*;
import static org.elasticsearch.common.xcontent.XContentFactory.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.node.NodeBuilder.*;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

/**
 * @author kimchy (shay.banon)
 */
@Test
public class SimpleAttachmentIntegrationTests {

    private final ESLogger logger = Loggers.getLogger(getClass());

    private Node node;

    @BeforeClass public void setupServer() {
        node = nodeBuilder().local(true).settings(settingsBuilder()
                .put("cluster.name", "test-cluster-" + NetworkUtils.getLocalAddress())
                .put("gateway.type", "none")).node();
    }

    @AfterClass public void closeServer() {
        node.close();
    }

    @BeforeMethod public void createIndex() {
        logger.info("creating index [test]");
        node.client().admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0))).actionGet();
        logger.info("Running Cluster Health");
        ClusterHealthResponse clusterHealth = node.client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus()).actionGet();
        logger.info("Done Cluster Health, status " + clusterHealth.status());
        assertThat(clusterHealth.timedOut(), equalTo(false));
        assertThat(clusterHealth.status(), equalTo(ClusterHealthStatus.GREEN));
    }

    @AfterMethod public void deleteIndex() {
        logger.info("deleting index [test]");
        node.client().admin().indices().delete(deleteIndexRequest("test")).actionGet();
    }

    @Test public void testSimpleAttachment() throws Exception {
        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/xcontent/test-mapping.json");
        byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/xcontent/testXHTML.html");

        node.client().admin().indices().putMapping(putMappingRequest("test").type("person").source(mapping)).actionGet();

        node.client().index(indexRequest("test").type("person")
                .source(jsonBuilder().startObject().field("file", html).endObject())).actionGet();
        node.client().admin().indices().refresh(refreshRequest()).actionGet();

        // both the extracted title and the extracted body should be searchable
        CountResponse countResponse = node.client().count(countRequest("test").query(fieldQuery("file.title", "test document"))).actionGet();
        assertThat(countResponse.count(), equalTo(1L));

        countResponse = node.client().count(countRequest("test").query(fieldQuery("file", "tests the ability"))).actionGet();
        assertThat(countResponse.count(), equalTo(1L));
    }
}
@@ -1 +0,0 @@
/data
@@ -1,129 +0,0 @@
dependsOn(':elasticsearch')

apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'eclipse'

archivesBaseName = "elasticsearch-river-couchdb"

explodedDistDir = new File(distsDir, 'exploded')

manifest.mainAttributes("Implementation-Title": "ElasticSearch::Plugins::River::CouchDB", "Implementation-Version": rootProject.version, "Implementation-Date": buildTimeStr)

configurations.compile.transitive = true
configurations.testCompile.transitive = true

// no need for a separate resource dir
sourceSets.main.resources.srcDirs 'src/main/java'
sourceSets.test.resources.srcDirs 'src/test/java'

// add the source files to the dist jar
//jar {
//    from sourceSets.main.allJava
//}

configurations {
    dists
    distLib {
        visible = false
        transitive = false
    }
}

dependencies {
    compile project(':elasticsearch')
}

task explodedDist(dependsOn: [jar], description: 'Builds the plugin zip file') << {
    [explodedDistDir]*.mkdirs()

    copy {
        from configurations.distLib
        into explodedDistDir
    }

    // remove elasticsearch jars (the compile dependency above pulls them in)
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*.jar") }

    copy {
        from libsDir
        into explodedDistDir
    }

    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-javadoc.jar") }
    ant.delete { fileset(dir: explodedDistDir, includes: "elasticsearch-*-sources.jar") }
}

task zip(type: Zip, dependsOn: ['explodedDist']) {
    from(explodedDistDir) {
    }
}

task release(dependsOn: [zip]) << {
    ant.delete(dir: explodedDistDir)
    copy {
        from distsDir
        into(new File(rootProject.distsDir, "plugins"))
    }
}

configurations {
    deployerJars
}

dependencies {
    deployerJars "org.apache.maven.wagon:wagon-http:1.0-beta-2"
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier = 'javadoc'
    from javadoc.destinationDir
}

artifacts {
    archives sourcesJar
    archives javadocJar
}

uploadArchives {
    repositories.mavenDeployer {
        configuration = configurations.deployerJars
        repository(url: rootProject.mavenRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }
        snapshotRepository(url: rootProject.mavenSnapshotRepoUrl) {
            authentication(userName: rootProject.mavenRepoUser, password: rootProject.mavenRepoPass)
        }

        pom.project {
            inceptionYear '2009'
            name 'elasticsearch-plugins-river-couchdb'
            description 'CouchDB River Plugin for ElasticSearch'
            licenses {
                license {
                    name 'The Apache Software License, Version 2.0'
                    url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution 'repo'
                }
            }
            scm {
                connection 'git://github.com/elasticsearch/elasticsearch.git'
                developerConnection 'git@github.com:elasticsearch/elasticsearch.git'
                url 'http://github.com/elasticsearch/elasticsearch'
            }
        }

        pom.whenConfigured {pom ->
            pom.dependencies = pom.dependencies.findAll {dep -> dep.scope != 'test' } // drop test-scoped dependencies from the published POM
        }
    }
}

eclipseClasspath {
    defaultOutputDir = file('build/eclipse-build')
}
@@ -1,2 +0,0 @@
plugin=org.elasticsearch.plugin.river.couchdb.CouchdbRiverPlugin
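
This one-line descriptor is what Elasticsearch scans for on the classpath to find plugin classes. A minimal sketch of that lookup (an illustration of the mechanism only; PluginDescriptorSketch is hypothetical and this is not the actual PluginsService code):

import java.io.InputStream;
import java.util.Properties;

public class PluginDescriptorSketch {
    // Reads an es-plugin.properties resource like the one above and
    // instantiates the class named by its "plugin" key.
    public static Object loadPlugin(ClassLoader loader) throws Exception {
        Properties props = new Properties();
        try (InputStream in = loader.getResourceAsStream("es-plugin.properties")) {
            props.load(in);
        }
        String className = props.getProperty("plugin");
        return loader.loadClass(className).getDeclaredConstructor().newInstance();
    }
}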