Build: Switch mapper attachments to gradle

This change switches the plugin to use gradle as the build system, and
updates the master branch to track elasticsearch master.
Ryan Ernst 2015-11-06 14:04:59 -08:00
parent 16674bd44e
commit 867f496ac8
17 changed files with 158 additions and 742 deletions
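With the Maven pom.xml and the Ant integration-test script removed (both shown below), the plugin is expected to build through the Gradle tasks wired in by elasticsearch's build-tools plugin; assuming the standard lifecycle tasks, gradle assemble should produce the plugin zip and gradle check should compile and run the tests.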

.gitignore

@ -12,3 +12,6 @@
/.local-execution-hints.log
/.local-*-execution-hints.log
/eclipse-build/
build/
generated-resources/
.gradle/

build.gradle (new file)

@ -0,0 +1,83 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.gradle.ElasticsearchProperties
buildscript {
repositories {
mavenCentral()
maven {
name 'sonatype-snapshots'
url 'http://oss.sonatype.org/content/repositories/snapshots/'
}
jcenter()
}
dependencies {
classpath 'org.elasticsearch.gradle:build-tools:3.0.0-SNAPSHOT'
}
}
apply plugin: 'idea'
apply plugin: 'eclipse'
apply plugin: 'elasticsearch.esplugin'
esplugin {
description 'The mapper attachments plugin adds the attachment type to Elasticsearch using Apache Tika.'
classname 'org.elasticsearch.plugin.mapper.attachments.MapperAttachmentsPlugin'
}
project.group = 'org.elasticsearch'
project.version = ElasticsearchProperties.version
project.ext.luceneVersion = ElasticsearchProperties.luceneVersion
repositories {
mavenCentral()
maven {
name 'sonatype-snapshots'
url 'http://oss.sonatype.org/content/repositories/snapshots/'
}
if (luceneVersion.contains('-snapshot')) {
String revision = (luceneVersion =~ /\d\.\d\.\d-snapshot-(\d+)/)[0][1]
maven {
name 'lucene-snapshots'
url "http://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}"
}
}
}
dependencies {
compile('org.apache.tika:tika-parsers:1.10') {
// TODO: is all of edu.ucar incompatible with apache2 license? if so, we can exclude the group?
// Not Apache2 License compatible
exclude group: 'edu.ucar', module: 'netcdf'
exclude group: 'edu.ucar', module: 'cdm'
exclude group: 'edu.ucar', module: 'httpservices'
exclude group: 'edu.ucar', module: 'grib'
exclude group: 'edu.ucar', module: 'netcdf4'
exclude group: 'com.uwyn', module: 'jhighlight'
// ES core already has these
exclude group: 'org.ow2.asm', module: 'asm-debug-all'
exclude group: 'commons-logging', module: 'commons-logging-api'
}
}
compileJava.options.compilerArgs << '-Xlint:-cast,-deprecation,-rawtypes'
forbiddenPatterns {
exclude '**/*.docx'
exclude '**/*.pdf'
}
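The lucene-snapshots repository block above derives an S3 path from the revision number embedded in the Lucene version string. As a minimal sketch of that extraction (the version value below is hypothetical; in the build script it comes from ElasticsearchProperties.luceneVersion), the equivalent match in plain Java:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LuceneSnapshotRevision {
    public static void main(String[] args) {
        // Hypothetical snapshot version string following the major.minor.patch-snapshot-<revision> pattern
        String luceneVersion = "5.4.0-snapshot-1710880";
        // Same regular expression as the (luceneVersion =~ /\d\.\d\.\d-snapshot-(\d+)/) check in build.gradle
        Matcher m = Pattern.compile("\\d\\.\\d\\.\\d-snapshot-(\\d+)").matcher(luceneVersion);
        if (m.find()) {
            // The captured group is the revision appended to the lucene-snapshots repository URL
            System.out.println("revision = " + m.group(1)); // prints: revision = 1710880
        }
    }
}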

pom.xml (deleted)

@ -1,118 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch-mapper-attachments</artifactId>
<version>3.1.0-SNAPSHOT</version>
<name>Elasticsearch Mapper Attachment plugin</name>
<description>The mapper attachments plugin adds the attachment type to Elasticsearch using Apache Tika.</description>
<url>https://github.com/elastic/elasticsearch-mapper-attachments/</url>
<inceptionYear>2009</inceptionYear>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:git@github.com:elastic/elasticsearch-mapper-attachments.git</connection>
<developerConnection>scm:git:git@github.com:elastic/elasticsearch-mapper-attachments.git</developerConnection>
<url>http://github.com/elastic/elasticsearch-mapper-attachments</url>
</scm>
<parent>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>plugins</artifactId>
<version>2.1.0-SNAPSHOT</version>
</parent>
<properties>
<elasticsearch.version>2.1.0-SNAPSHOT</elasticsearch.version>
<elasticsearch.plugin.name>mapper-attachments</elasticsearch.plugin.name>
<elasticsearch.plugin.classname>org.elasticsearch.plugin.mapper.attachments.MapperAttachmentsPlugin</elasticsearch.plugin.classname>
<elasticsearch.assembly.descriptor>${project.basedir}/src/main/assemblies/plugin.xml</elasticsearch.assembly.descriptor>
<tests.rest.suite>mapper_attachments</tests.rest.suite>
<tests.rest.load_packaged>false</tests.rest.load_packaged>
<elasticsearch.integ.antfile>${project.basedir}/src/test/resources/integ-tests-183.xml</elasticsearch.integ.antfile>
</properties>
<dependencies>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
<scope>provided</scope>
</dependency>
<!-- Tika -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-parsers</artifactId>
<version>1.10</version>
<exclusions>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>edu.ucar</groupId>
<artifactId>netcdf</artifactId>
</exclusion>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>edu.ucar</groupId>
<artifactId>cdm</artifactId>
</exclusion>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>edu.ucar</groupId>
<artifactId>httpservices</artifactId>
</exclusion>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>edu.ucar</groupId>
<artifactId>grib</artifactId>
</exclusion>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>edu.ucar</groupId>
<artifactId>netcdf4</artifactId>
</exclusion>
<!-- Not Apache2 License compatible -->
<exclusion>
<groupId>com.uwyn</groupId>
<artifactId>jhighlight</artifactId>
</exclusion>
<!-- ES core already has these -->
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm-debug-all</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging-api</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
</plugin>
</plugins>
</build>
<repositories>
<!-- We need this repository when we depend on a parent pom which is a SNAPSHOT one -->
<repository>
<id>oss-snapshots</id>
<name>Sonatype OSS Snapshots</name>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
</repository>
</repositories>
</project>

AttachmentMapper.java

@ -26,6 +26,7 @@ import org.apache.tika.Tika;
import org.apache.tika.language.LanguageIdentifier;
import org.apache.tika.metadata.Metadata;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
@ -58,7 +59,7 @@ import static org.elasticsearch.plugin.mapper.attachments.tika.TikaInstance.tika
* }
* }
* </pre>
* <p/>
* <p>
* _content_length = Specify the maximum amount of characters to extract from the attachment. If not specified, then the default for
* tika is 100,000 characters. Caution is required when setting large values as this can cause memory issues.
*/
@ -624,8 +625,9 @@ public class AttachmentMapper extends FieldMapper {
}
@Override
@SuppressWarnings("unchecked")
public Iterator<Mapper> iterator() {
List<FieldMapper> extras = Arrays.asList(
List<Mapper> extras = Arrays.asList(
contentMapper,
dateMapper,
titleMapper,
@ -635,7 +637,7 @@ public class AttachmentMapper extends FieldMapper {
contentTypeMapper,
contentLengthMapper,
languageMapper);
return CollectionUtils.concat(super.iterator(), extras.iterator());
return Iterators.concat(super.iterator(), extras.iterator());
}
@Override

RegisterAttachmentType.java (deleted)

@ -1,40 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.attachment;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettings;
/**
*
*/
public class RegisterAttachmentType extends AbstractIndexComponent {
@Inject
public RegisterAttachmentType(Index index, @IndexSettings Settings indexSettings, MapperService mapperService) {
super(index, indexSettings);
mapperService.documentMapperParser().putTypeParser("attachment", new AttachmentMapper.TypeParser());
}
}

AttachmentsIndexModule.java (deleted)

@ -1,34 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugin.mapper.attachments;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.index.mapper.attachment.RegisterAttachmentType;
/**
*
*/
public class AttachmentsIndexModule extends AbstractModule {
@Override
protected void configure() {
bind(RegisterAttachmentType.class).asEagerSingleton();
}
}

MapperAttachmentsPlugin.java

@ -19,16 +19,10 @@
package org.elasticsearch.plugin.mapper.attachments;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.attachment.AttachmentMapper;
import org.elasticsearch.plugins.Plugin;
import java.util.Collection;
import java.util.Collections;
/**
*
*/
public class MapperAttachmentsPlugin extends Plugin {
@Override
@ -42,7 +36,7 @@ public class MapperAttachmentsPlugin extends Plugin {
}
@Override
public Collection<Module> indexModules(Settings indexSettings) {
return Collections.<Module>singletonList(new AttachmentsIndexModule());
public void onIndexService(IndexService indexService) {
indexService.mapperService().documentMapperParser().putTypeParser("attachment", new AttachmentMapper.TypeParser());
}
}
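The two classes deleted above, RegisterAttachmentType and AttachmentsIndexModule, existed only to bind the attachment type parser through an index module; with the plugin now calling putTypeParser directly from onIndexService, that indirection is no longer needed.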

MapperTestUtils.java

@ -22,23 +22,19 @@ package org.elasticsearch.index.mapper.attachment.test;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNameModule;
import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettingsModule;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Locale;
import static com.carrotsearch.randomizedtesting.RandomizedTest.assumeTrue;
@ -46,56 +42,18 @@ import static org.elasticsearch.plugin.mapper.attachments.tika.LocaleChecker.isL
public class MapperTestUtils {
public static MapperService newMapperService(Path tempDir) {
return newMapperService(new Index("test"), Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", tempDir)
.build());
}
public static MapperService newMapperService(Index index, Settings indexSettings) {
return new MapperService(index,
indexSettings,
newAnalysisService(indexSettings),
newSimilarityLookupService(indexSettings),
null);
}
public static AnalysisService newAnalysisService(Path tempDir) {
return newAnalysisService(Settings.builder()
.put("path.home", tempDir)
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
}
public static AnalysisService newAnalysisService(Settings indexSettings) {
Injector parentInjector = new ModulesBuilder().add(new SettingsModule(indexSettings), new EnvironmentModule(new Environment(indexSettings))).createInjector();
Index index = new Index("test");
Injector injector = new ModulesBuilder().add(
new IndexSettingsModule(index, indexSettings),
new IndexNameModule(index),
new AnalysisModule(indexSettings, parentInjector.getInstance(IndicesAnalysisService.class))).createChildInjector(parentInjector);
return injector.getInstance(AnalysisService.class);
}
public static SimilarityLookupService newSimilarityLookupService(Settings indexSettings) {
return new SimilarityLookupService(new Index("test"), indexSettings);
}
public static DocumentMapperParser newMapperParser(Path tempDir) {
return newMapperParser(Settings.builder()
.put("path.home", tempDir)
.build());
}
public static DocumentMapperParser newMapperParser(Settings settings) {
Settings forcedSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(settings)
.build();
MapperService mapperService = new MapperService(new Index("test"), forcedSettings, newAnalysisService(forcedSettings), newSimilarityLookupService(forcedSettings), null);
return new DocumentMapperParser(forcedSettings, mapperService, MapperTestUtils.newAnalysisService(forcedSettings), null, null);
public static MapperService newMapperService(Path tempDir, Settings indexSettings) throws IOException {
Settings nodeSettings = Settings.builder()
.put("path.home", tempDir)
.build();
indexSettings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(indexSettings)
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("test"), indexSettings, Collections.emptyList());
AnalysisService analysisService = new AnalysisRegistry(null, new Environment(nodeSettings)).build(idxSettings);
SimilarityService similarityService = new SimilarityService(idxSettings, Collections.emptyMap());
return new MapperService(idxSettings, analysisService, similarityService);
}
/**

StandaloneRunner.java

@ -87,7 +87,7 @@ public class StandaloneRunner extends CliTool {
this.size = size;
this.url = url;
this.base64text = base64text;
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(PathUtils.get(".")); // use CWD b/c it won't be used
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(PathUtils.get("."), Settings.EMPTY).documentMapperParser(); // use CWD b/c it won't be used
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/standalone/standalone-mapping.json");

DateAttachmentMapperTests.java

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.attachment.test.unit;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.attachment.AttachmentMapper;
@ -38,12 +39,11 @@ public class DateAttachmentMapperTests extends AttachmentUnitTestCase {
private DocumentMapperParser mapperParser;
@Before
public void setupMapperParser() {
mapperParser = MapperTestUtils.newMapperParser(createTempDir());
public void setupMapperParser() throws Exception {
mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
}
@Test
public void testSimpleMappings() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/date/date-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);

EncryptedDocMapperTest.java

@ -43,9 +43,8 @@ import static org.hamcrest.Matchers.*;
*/
public class EncryptedDocMapperTest extends AttachmentUnitTestCase {
@Test
public void testMultipleDocsEncryptedLast() throws IOException {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(createTempDir());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
@ -75,9 +74,8 @@ public class EncryptedDocMapperTest extends AttachmentUnitTestCase {
assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()), nullValue());
}
@Test
public void testMultipleDocsEncryptedFirst() throws IOException {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(createTempDir());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
@ -107,40 +105,31 @@ public class EncryptedDocMapperTest extends AttachmentUnitTestCase {
assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), is(344L));
}
@Test(expected = MapperParsingException.class)
public void testMultipleDocsEncryptedNotIgnoringErrors() throws IOException {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(
Settings.builder()
.put("path.home", createTempDir())
.put("index.mapping.attachment.ignore_errors", false)
.build());
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
try {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
Settings.builder()
.put("index.mapping.attachment.ignore_errors", false)
.build()).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);
byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf");
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/encrypted/test-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);
byte[] html = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/htmlWithValidDateMeta.html");
byte[] pdf = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/attachment/test/sample-files/encrypted.pdf");
BytesReference json = jsonBuilder()
BytesReference json = jsonBuilder()
.startObject()
.field("file1", pdf)
.field("file2", html)
.endObject().bytes();
ParseContext.Document doc = docMapper.parse("person", "person", "1", json).rootDoc();
assertThat(doc.get(docMapper.mappers().getMapper("file1").fieldType().names().indexName()), nullValue());
assertThat(doc.get(docMapper.mappers().getMapper("file1.title").fieldType().names().indexName()), nullValue());
assertThat(doc.get(docMapper.mappers().getMapper("file1.author").fieldType().names().indexName()), nullValue());
assertThat(doc.get(docMapper.mappers().getMapper("file1.keywords").fieldType().names().indexName()), nullValue());
assertThat(doc.get(docMapper.mappers().getMapper("file1.content_type").fieldType().names().indexName()), nullValue());
assertThat(doc.getField(docMapper.mappers().getMapper("file1.content_length").fieldType().names().indexName()), nullValue());
assertThat(doc.get(docMapper.mappers().getMapper("file2").fieldType().names().indexName()), containsString("World"));
assertThat(doc.get(docMapper.mappers().getMapper("file2.title").fieldType().names().indexName()), equalTo("Hello"));
assertThat(doc.get(docMapper.mappers().getMapper("file2.author").fieldType().names().indexName()), equalTo("kimchy"));
assertThat(doc.get(docMapper.mappers().getMapper("file2.keywords").fieldType().names().indexName()), equalTo("elasticsearch,cool,bonsai"));
assertThat(doc.get(docMapper.mappers().getMapper("file2.content_type").fieldType().names().indexName()), equalTo("text/html; charset=ISO-8859-1"));
assertThat(doc.getField(docMapper.mappers().getMapper("file2.content_length").fieldType().names().indexName()).numericValue().longValue(), is(344L));
docMapper.parse("person", "person", "1", json);
fail("Expected doc parsing exception");
} catch (MapperParsingException e) {
// TODO: check the error message...getting security problems atm
//assertTrue(e.getMessage(), e.getMessage().contains())
}
}
}

LanguageDetectionAttachmentMapperTests.java

@ -51,11 +51,10 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa
}
public void setupMapperParser(boolean langDetect) throws IOException {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(
Settings.settingsBuilder()
.put("path.home", createTempDir())
.put("index.mapping.attachment.detect_language", langDetect)
.build());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
Settings.settingsBuilder()
.put("index.mapping.attachment.detect_language", langDetect)
.build()).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/language/language-mapping.json");
docMapper = mapperParser.parse(mapping);
@ -84,38 +83,31 @@ public class LanguageDetectionAttachmentMapperTests extends AttachmentUnitTestCa
assertThat(doc.get(docMapper.mappers().getMapper("file.language").fieldType().names().indexName()), equalTo(expected));
}
@Test
public void testFrDetection() throws Exception {
testLanguage("text-in-french.txt", "fr");
}
@Test
public void testEnDetection() throws Exception {
testLanguage("text-in-english.txt", "en");
}
@Test
public void testFrForced() throws Exception {
testLanguage("text-in-english.txt", "fr", "fr");
}
/**
* This test gives strange results! detection of ":-)" gives "lt" as a result
* @throws Exception
*/
@Test
public void testNoLanguage() throws Exception {
testLanguage("text-in-nolang.txt", "lt");
}
@Test
public void testLangDetectDisabled() throws Exception {
// We replace the mapper with another one which has index.mapping.attachment.detect_language = false
setupMapperParser(false);
testLanguage("text-in-english.txt", null);
}
@Test
public void testLangDetectDocumentEnabled() throws Exception {
// We replace the mapper with another one which has index.mapping.attachment.detect_language = false
setupMapperParser(false);

MetadataMapperTest.java

@ -46,7 +46,7 @@ public class MetadataMapperTest extends AttachmentUnitTestCase {
.put(this.testSettings)
.put(otherSettings)
.build();
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(settings);
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), settings).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/metadata/test-mapping.json");
@ -76,32 +76,30 @@ public class MetadataMapperTest extends AttachmentUnitTestCase {
assertThat(doc.getField(docMapper.mappers().getMapper("file.content_length").fieldType().names().indexName()).numericValue().longValue(), is(expectedLength));
}
@Test
public void testIgnoreWithoutDate() throws Exception {
checkMeta("htmlWithoutDateMeta.html", Settings.builder().build(), null, 300L);
}
@Test
public void testIgnoreWithEmptyDate() throws Exception {
checkMeta("htmlWithEmptyDateMeta.html", Settings.builder().build(), null, 334L);
}
@Test
public void testIgnoreWithCorrectDate() throws Exception {
checkMeta("htmlWithValidDateMeta.html", Settings.builder().build(), 1354233600000L, 344L);
}
@Test
public void testWithoutDate() throws Exception {
checkMeta("htmlWithoutDateMeta.html", Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(), null, 300L);
}
@Test(expected = MapperParsingException.class)
public void testWithEmptyDate() throws Exception {
checkMeta("htmlWithEmptyDateMeta.html", Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(), null, null);
try {
checkMeta("htmlWithEmptyDateMeta.html", Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(), null, null);
} catch (MapperParsingException e) {
throw e;
}
}
@Test
public void testWithCorrectDate() throws Exception {
checkMeta("htmlWithValidDateMeta.html", Settings.builder().put("index.mapping.attachment.ignore_errors", false).build(), 1354233600000L, 344L);
}

MultifieldAttachmentMapperTests.java

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.attachment.test.unit;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@ -48,8 +49,8 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
private ThreadPool threadPool;
@Before
public void setupMapperParser() {
mapperParser = MapperTestUtils.newMapperParser(createTempDir());
public void setupMapperParser() throws Exception {
mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
}
@ -59,7 +60,6 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
terminate(threadPool);
}
@Test
public void testSimpleMappings() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);
@ -87,7 +87,6 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
assertThat(docMapper.mappers().getMapper("file.content_type.suggest"), instanceOf(StringFieldMapper.class));
}
@Test
public void testExternalValues() throws Exception {
String originalText = "This is an elasticsearch mapper attachment test.";
String contentType = "text/plain; charset=ISO-8859-1";
@ -96,7 +95,7 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1));
threadPool = new ThreadPool("testing-only");
MapperService mapperService = MapperTestUtils.newMapperService(createTempDir());
MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY);
mapperService.documentMapperParser().putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json");

SimpleAttachmentMapperTests.java

@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.*;
public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {
public void testSimpleMappings() throws Exception {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(createTempDir());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);
@ -67,10 +67,10 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {
}
public void testContentBackcompat() throws Exception {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(Settings.builder()
.put("path.home", createTempDir())
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.build());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(),
Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.build()).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping.json");
DocumentMapper docMapper = mapperParser.parse(mapping);
@ -84,10 +84,9 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase {
/**
* test for https://github.com/elastic/elasticsearch-mapper-attachments/issues/179
* @throws Exception
*/
public void testSimpleMappingsWithAllFields() throws Exception {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(createTempDir());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/simple/test-mapping-all-fields.json");
DocumentMapper docMapper = mapperParser.parse(mapping);

VariousDocTest.java

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.attachment.test.unit;
import org.apache.tika.Tika;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParseContext;
@ -49,7 +50,7 @@ public class VariousDocTest extends AttachmentUnitTestCase {
@Before
public void createMapper() throws IOException {
DocumentMapperParser mapperParser = MapperTestUtils.newMapperParser(createTempDir());
DocumentMapperParser mapperParser = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY).documentMapperParser();
mapperParser.putTypeParser(AttachmentMapper.CONTENT_TYPE, new AttachmentMapper.TypeParser());
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/various-doc/test-mapping.json");
@ -59,7 +60,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
/**
* Test for https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/104
*/
@Test
public void testWordDocxDocument104() throws Exception {
assertParseable("issue-104.docx");
testMapper("issue-104.docx", false);
@ -68,7 +68,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
/**
* Test for encrypted PDF
*/
@Test
public void testEncryptedPDFDocument() throws Exception {
assertException("encrypted.pdf");
// TODO Remove when this will be fixed in Tika. See https://issues.apache.org/jira/browse/TIKA-1548
@ -79,7 +78,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
/**
* Test for HTML
*/
@Test
public void testHtmlDocument() throws Exception {
assertParseable("htmlWithEmptyDateMeta.html");
testMapper("htmlWithEmptyDateMeta.html", false);
@ -88,7 +86,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
/**
* Test for XHTML
*/
@Test
public void testXHtmlDocument() throws Exception {
assertParseable("testXHTML.html");
testMapper("testXHTML.html", false);
@ -97,7 +94,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
/**
* Test for TXT
*/
@Test
public void testTxtDocument() throws Exception {
assertParseable("text-in-english.txt");
testMapper("text-in-english.txt", false);
@ -107,7 +103,6 @@ public class VariousDocTest extends AttachmentUnitTestCase {
* Test for ASCIIDOC
* Not yet supported by Tika: https://github.com/elasticsearch/elasticsearch-mapper-attachments/issues/29
*/
@Test
public void testAsciidocDocument() throws Exception {
assertParseable("asciidoc.asciidoc");
testMapper("asciidoc.asciidoc", false);

elasticsearch-integration-tests Ant script (deleted)

@ -1,404 +0,0 @@
<?xml version="1.0"?>
<project name="elasticsearch-integration-tests">
<!-- our pid file for easy cleanup -->
<property name="integ.pidfile" location="${integ.scratch}/es.pid"/>
<!-- if this exists, ES is running (maybe) -->
<available property="integ.pidfile.exists" file="${integ.pidfile}"/>
<!-- name of our cluster, maybe needs changing -->
<property name="integ.cluster.name" value="prepare_release"/>
<!-- runs an OS script -->
<macrodef name="run-script">
<attribute name="script"/>
<attribute name="spawn" default="false"/>
<element name="nested" optional="true"/>
<sequential>
<local name="failonerror"/>
<condition property="failonerror">
<isfalse value="@{spawn}"/>
</condition>
<!-- create a temp CWD, to enforce that commands don't rely on CWD -->
<local name="temp.cwd"/>
<tempfile property="temp.cwd" destDir="${integ.temp}"/>
<mkdir dir="${temp.cwd}"/>
<!-- print commands we run -->
<local name="script.base"/>
<basename file="@{script}" property="script.base"/>
<!-- crappy way to output, but we need it. make it nice later -->
<echoxml><exec script="${script.base}"><nested/></exec></echoxml>
<exec executable="cmd" osfamily="winnt" dir="${temp.cwd}" failonerror="${failonerror}" spawn="@{spawn}" taskname="${script.base}">
<arg value="/c"/>
<arg value="&quot;"/>
<arg value="@{script}.bat"/>
<nested/>
<arg value="&quot;"/>
</exec>
<exec executable="sh" osfamily="unix" dir="${temp.cwd}" failonerror="${failonerror}" spawn="@{spawn}" taskname="${script.base}">
<arg value="@{script}"/>
<nested/>
</exec>
</sequential>
</macrodef>
<!-- extracts PID from file -->
<macrodef name="extract-pid">
<attribute name="file"/>
<attribute name="property"/>
<sequential>
<loadfile srcFile="@{file}" property="@{property}">
<filterchain>
<striplinebreaks/>
</filterchain>
</loadfile>
</sequential>
</macrodef>
<!-- applies transformations to src and stores in dst -->
<macrodef name="filter-property">
<attribute name="src"/>
<attribute name="dest"/>
<element name="chain"/>
<sequential>
<loadresource property="@{dest}">
<propertyresource name="@{src}"/>
<filterchain>
<tokenfilter>
<chain/>
</tokenfilter>
</filterchain>
</loadresource>
</sequential>
</macrodef>
<!-- installs a plugin into elasticsearch -->
<macrodef name="install-plugin">
<attribute name="home" default="${integ.scratch}/elasticsearch-${elasticsearch.version}"/>
<attribute name="name"/>
<attribute name="file"/>
<sequential>
<local name="url"/>
<makeurl property="url" file="@{file}"/>
<!-- install plugin -->
<echo>Installing plugin @{name}...</echo>
<run-script script="@{home}/bin/plugin">
<nested>
<arg value="install"/>
<arg value="${url}"/>
</nested>
</run-script>
<fail message="did not find plugin installed as @{name}">
<condition>
<not>
<resourceexists>
<file file="@{home}/plugins/@{name}"/>
</resourceexists>
</not>
</condition>
</fail>
</sequential>
</macrodef>
<!-- waits for elasticsearch to start -->
<macrodef name="waitfor-elasticsearch">
<attribute name="port"/>
<attribute name="timeoutproperty"/>
<sequential>
<echo>Waiting for elasticsearch to become available on port @{port}...</echo>
<waitfor maxwait="30" maxwaitunit="second"
checkevery="500" checkeveryunit="millisecond"
timeoutproperty="@{timeoutproperty}">
<http url="http://localhost:@{port}"/>
</waitfor>
</sequential>
</macrodef>
<!-- waits for cluster to form and have exactly two nodes -->
<macrodef name="waitfor-two-nodes">
<attribute name="port"/>
<attribute name="timeoutproperty"/>
<sequential>
<echo>Waiting for elasticsearch to form a cluster of two...</echo>
<waitfor maxwait="30" maxwaitunit="second"
checkevery="500" checkeveryunit="millisecond"
timeoutproperty="@{timeoutproperty}">
<http url="http://localhost:@{port}/_cluster/health?wait_for_nodes=2"/>
</waitfor>
</sequential>
</macrodef>
<!-- start elasticsearch and wait until its ready -->
<macrodef name="startup-elasticsearch">
<attribute name="home" default="${integ.scratch}/elasticsearch-${elasticsearch.version}"/>
<attribute name="spawn" default="true"/>
<attribute name="args" default="${integ.args}"/>
<attribute name="es.unicast.hosts" default="localhost:${integ.transport.port}"/>
<attribute name="es.cluster.name" default="${integ.cluster.name}"/>
<attribute name="es.http.port" default="${integ.http.port}"/>
<attribute name="es.transport.tcp.port" default="${integ.transport.port}"/>
<attribute name="es.pidfile" default="${integ.pidfile}"/>
<attribute name="jvm.args" default="${tests.jvm.argline}"/>
<element name="additional-args" optional="true"/>
<sequential>
<!-- make sure no elasticsearch instance is currently running and listening on the port we need -->
<fail message="This test expects port @{es.http.port} to be free but an elasticsearch instance is already running and listening on that port.
Maybe the last test run did not manage to shut down the node correctly?
You must kill it before tests can run.">
<condition>
<socket server="localhost" port="@{es.http.port}"></socket>
</condition>
</fail>
<!-- run bin/elasticsearch with args -->
<echo>Starting up external cluster...</echo>
<run-script script="@{home}/bin/elasticsearch"
spawn="@{spawn}">
<nested>
<env key="JAVA_HOME" value="${java.home}"/>
<!-- we pass these as gc options, even if they arent, to avoid conflicting gc options -->
<env key="ES_GC_OPTS" value="@{jvm.args}"/>
<arg value="-Des.cluster.name=@{es.cluster.name}"/>
<arg value="-Des.http.port=@{es.http.port}"/>
<arg value="-Des.transport.tcp.port=@{es.transport.tcp.port}"/>
<arg value="-Des.pidfile=@{es.pidfile}"/>
<arg value="-Des.discovery.zen.ping.unicast.hosts=@{es.unicast.hosts}"/>
<arg value="-Des.path.repo=@{home}/repo"/>
<arg value="-Des.path.shared_data=@{home}/../"/>
<arg value="-Des.script.inline=on"/>
<arg value="-Des.script.indexed=on"/>
<arg value="-Des.repositories.url.allowed_urls=http://snapshot.test*"/>
<additional-args/>
</nested>
</run-script>
<!-- wait for startup -->
<local name="failed.to.start"/>
<waitfor-elasticsearch port="@{es.http.port}"
timeoutproperty="failed.to.start"/>
<!-- best effort, print console log. useful if it fails especially -->
<local name="log.contents"/>
<loadfile srcFile="@{home}/logs/@{es.cluster.name}.log"
property="log.contents"
failonerror="false"/>
<echo message="${log.contents}" taskname="elasticsearch"/>
<fail message="ES instance did not start" if="failed.to.start"/>
<local name="integ.pid"/>
<extract-pid file="@{es.pidfile}" property="integ.pid"/>
<echo>External node started PID ${integ.pid}</echo>
</sequential>
</macrodef>
<!-- Takes a plugin zip file and return the plugin name. For instance
'analysis-icu-2.0.0.zip' would return
'analysis-icu'. -->
<macrodef name="convert-plugin-name">
<attribute name="file"/>
<attribute name="outputproperty"/>
<sequential>
<local name="file.base"/>
<basename file="@{file}" property="file.base"/>
<filter-property src="file.base" dest="@{outputproperty}">
<chain>
<replacestring from="-${elasticsearch.version}.zip" to=""/>
</chain>
</filter-property>
</sequential>
</macrodef>
<macrodef name="stop-node">
<attribute name="es.pidfile" default="${integ.pidfile}"/>
<sequential>
<local name="integ.pid"/>
<extract-pid file="@{es.pidfile}" property="integ.pid"/>
<echo>Shutting down external node PID ${integ.pid}</echo>
<!-- verify with jps that this actually is the correct pid.
See if we can find the line "pid org.elasticsearch.bootstrap.Elasticsearch" in the output of jps -l.-->
<local name="jps.pidline"/>
<local name="jps.executable"/>
<local name="environment"/>
<property environment="environment"/>
<property name="jps.executable" location="${environment.JAVA_HOME}/bin/jps"/>
<exec executable="${jps.executable}" failonerror="true">
<arg value="-l"/>
<redirector outputproperty="jps.pidline">
<outputfilterchain>
<linecontains>
<contains value="${integ.pid} org.elasticsearch.bootstrap.Elasticsearch"/>
</linecontains>
</outputfilterchain>
</redirector>
</exec>
<fail
message="pid file at @{es.pidfile} is ${integ.pid} but jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch and this pid.
Did you run mvn clean? Maybe an old pid file is still lying around.">
<condition>
<equals arg1="${jps.pidline}" arg2=""/>
</condition>
</fail>
<exec executable="taskkill" failonerror="true" osfamily="winnt">
<arg value="/F"/>
<arg value="/PID"/>
<arg value="${integ.pid}"/>
</exec>
<exec executable="kill" failonerror="true" osfamily="unix">
<arg value="-9"/>
<arg value="${integ.pid}"/>
</exec>
<delete file="@{es.pidfile}"/>
</sequential>
</macrodef>
<!-- starts a unicast node on an already setup workspace-->
<macrodef name="start-unicast-node">
<attribute name="es.http.port" default="${integ.http.port}"/>
<attribute name="es.transport.port" default="${integ.transport.port}"/>
<attribute name="es.pidfile" default="${integ.pidfile}"/>
<attribute name="es.peer.list" />
<sequential>
<startup-elasticsearch es.pidfile="@{es.pidfile}"
es.transport.tcp.port="@{es.transport.port}" es.http.port="@{es.http.port}"
es.unicast.hosts="@{es.peer.list}"/>
</sequential>
</macrodef>
<!-- unzip the elasticsearch zip -->
<target name="setup-workspace" depends="stop-external-cluster">
<sequential>
<delete dir="${integ.scratch}"/>
<unzip src="${integ.deps}/elasticsearch-${elasticsearch.version}.zip" dest="${integ.scratch}"/>
</sequential>
</target>
<!-- run elasticsearch in the foreground (for debugging etc) -->
<!-- TODO: doesn't belong here, but we will figure it out -->
<target name="start-foreground" depends="stop-external-cluster">
<delete dir="${integ.scratch}"/>
<unzip src="${project.build.directory}/releases/${project.artifactId}-${project.version}.zip" dest="${integ.scratch}"/>
<local name="home"/>
<property name="home" location="${integ.scratch}/${project.artifactId}-${elasticsearch.version}"/>
<startup-elasticsearch spawn="false" home="${home}"
jvm.args="${tests.jvm.argline} -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000"/>
</target>
<!-- unzip core release artifact, install plugin, then start ES -->
<target name="start-external-cluster-with-plugin" depends="setup-workspace">
<install-plugin name="${elasticsearch.plugin.name}" file="${project.build.directory}/releases/${project.artifactId}-${project.version}.zip"/>
<startup-elasticsearch/>
</target>
<!-- unzip core release artifact then start ES -->
<target name="start-external-cluster" depends="setup-workspace">
<startup-elasticsearch/>
</target>
<target name="stop-external-cluster" if="integ.pidfile.exists">
<stop-node/>
</target>
<!-- distribution tests: .zip -->
<target name="setup-workspace-zip" depends="stop-external-cluster">
<sequential>
<delete dir="${integ.scratch}"/>
<unzip src="${project.build.directory}/releases/${project.artifactId}-${project.version}.zip"
dest="${integ.scratch}"/>
</sequential>
</target>
<target name="start-external-cluster-zip" depends="setup-workspace-zip">
<startup-elasticsearch/>
</target>
<!-- distribution tests: .tar.gz -->
<target name="setup-workspace-tar" depends="stop-external-cluster">
<sequential>
<delete dir="${integ.scratch}"/>
<untar src="${project.build.directory}/releases/${project.artifactId}-${project.version}.tar.gz"
dest="${integ.scratch}"
compression="gzip"/>
</sequential>
</target>
<target name="start-external-cluster-tar" depends="setup-workspace-tar">
<startup-elasticsearch/>
</target>
<!-- distribution tests: .deb -->
<target name="setup-workspace-deb" depends="stop-external-cluster">
<sequential>
<delete dir="${integ.scratch}"/>
<mkdir dir="${integ.scratch}/deb-extracted"/>
<local name="debfile"/>
<property name="debfile" location="${project.build.directory}/releases/${project.artifactId}-${project.version}.deb"/>
<!-- print some basic package info -->
<exec executable="dpkg-deb" failonerror="true" taskname="deb-info">
<arg value="-I"/>
<arg value="${debfile}"/>
</exec>
<!-- extract contents from .deb package -->
<exec executable="dpkg-deb" failonerror="true">
<arg value="-x"/>
<arg value="${debfile}"/>
<arg value="${integ.scratch}/deb-extracted"/>
</exec>
</sequential>
</target>
<target name="start-external-cluster-deb" depends="setup-workspace-deb">
<startup-elasticsearch home="${integ.scratch}/deb-extracted/usr/share/elasticsearch/"/>
</target>
<!-- distribution tests: .rpm -->
<target name="setup-workspace-rpm" depends="stop-external-cluster">
<sequential>
<delete dir="${integ.scratch}"/>
<!-- use full paths with paranoia, we will be doing relocations -->
<local name="rpm.file"/>
<local name="rpm.database"/>
<local name="rpm.extracted"/>
<property name="rpm.file" location="${project.build.directory}/releases/${project.artifactId}-${project.version}.rpm"/>
<property name="rpm.database" location="${integ.scratch}/rpm-database"/>
<property name="rpm.extracted" location="${integ.scratch}/rpm-extracted"/>
<mkdir dir="${rpm.database}"/>
<mkdir dir="${rpm.extracted}"/>
<!-- print some basic package info -->
<exec executable="rpm" failonerror="true" taskname="rpm-info">
<arg value="-q"/>
<arg value="-i"/>
<arg value="-p"/>
<arg value="${rpm.file}"/>
</exec>
<!-- extract contents from .rpm package -->
<exec executable="rpm" failonerror="true" taskname="rpm">
<arg value="--dbpath"/>
<arg value="${rpm.database}"/>
<arg value="--badreloc"/>
<arg value="--relocate"/>
<arg value="/=${rpm.extracted}"/>
<arg value="--nodeps"/>
<arg value="--noscripts"/>
<arg value="--notriggers"/>
<arg value="-i"/>
<arg value="${rpm.file}"/>
</exec>
</sequential>
</target>
<target name="start-external-cluster-rpm" depends="setup-workspace-rpm">
<startup-elasticsearch home="${integ.scratch}/rpm-extracted/usr/share/elasticsearch/"/>
</target>
</project>