diff --git a/core/src/main/java/org/elasticsearch/Build.java b/core/src/main/java/org/elasticsearch/Build.java index cf3b7de05a6..508b4dc4375 100644 --- a/core/src/main/java/org/elasticsearch/Build.java +++ b/core/src/main/java/org/elasticsearch/Build.java @@ -19,14 +19,13 @@ package org.elasticsearch; -import org.elasticsearch.common.io.FastStringReader; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; import java.io.IOException; +import java.io.InputStream; import java.util.Properties; /** @@ -40,10 +39,9 @@ public class Build { String hashShort = "NA"; String timestamp = "NA"; - try { - String properties = Streams.copyToStringFromClasspath("/es-build.properties"); + try (InputStream is = Build.class.getResourceAsStream("/es-build.properties")){ Properties props = new Properties(); - props.load(new FastStringReader(properties)); + props.load(is); hash = props.getProperty("hash", hash); if (!hash.equals("NA")) { hashShort = hash.substring(0, 7); diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java index efbb19142c3..d3942038164 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/ShardUpgradeResponse.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.upgrade.post; +import org.elasticsearch.Version; import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,7 +33,9 @@ import java.text.ParseException; */ class ShardUpgradeResponse extends BroadcastShardResponse { - private org.apache.lucene.util.Version version; + private org.apache.lucene.util.Version oldestLuceneSegment; + + private Version upgradeVersion; private boolean primary; @@ -40,14 +43,19 @@ class ShardUpgradeResponse extends BroadcastShardResponse { ShardUpgradeResponse() { } - ShardUpgradeResponse(ShardId shardId, boolean primary, org.apache.lucene.util.Version version) { + ShardUpgradeResponse(ShardId shardId, boolean primary, Version upgradeVersion, org.apache.lucene.util.Version oldestLuceneSegment) { super(shardId); this.primary = primary; - this.version = version; + this.upgradeVersion = upgradeVersion; + this.oldestLuceneSegment = oldestLuceneSegment; } - public org.apache.lucene.util.Version version() { - return this.version; + public org.apache.lucene.util.Version oldestLuceneSegment() { + return this.oldestLuceneSegment; + } + + public Version upgradeVersion() { + return this.upgradeVersion; } public boolean primary() { @@ -59,18 +67,21 @@ class ShardUpgradeResponse extends BroadcastShardResponse { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); primary = in.readBoolean(); + upgradeVersion = Version.readVersion(in); try { - version = org.apache.lucene.util.Version.parse(in.readString()); + oldestLuceneSegment = org.apache.lucene.util.Version.parse(in.readString()); } catch (ParseException ex) { - throw new IOException("failed to parse lucene version [" + version + "]", ex); + throw new IOException("failed to parse lucene version [" + oldestLuceneSegment + "]", ex); } + } 
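The two sides of the ShardUpgradeResponse wire format must stay symmetric: readFrom (above) consumes the primary flag, then the Elasticsearch upgrade version, then the oldest Lucene segment version, in exactly the order that writeTo (below) emits them. A minimal round-trip sketch of that contract, assuming the BytesStreamOutput and StreamInput.wrap stream helpers from this codebase; not part of the patch:

    // Sketch only: write the three fields in wire order, then read them back in the same order.
    BytesStreamOutput out = new BytesStreamOutput();
    out.writeBoolean(true);                                             // primary
    Version.writeVersion(Version.CURRENT, out);                         // upgradeVersion
    out.writeString(org.apache.lucene.util.Version.LATEST.toString()); // oldestLuceneSegment
    StreamInput in = StreamInput.wrap(out.bytes().toBytes());
    assert in.readBoolean();                                            // primary
    assert Version.readVersion(in).equals(Version.CURRENT);             // upgradeVersion
    assert in.readString().equals(org.apache.lucene.util.Version.LATEST.toString());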
@Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(primary); - out.writeString(version.toString()); + Version.writeVersion(upgradeVersion, out); + out.writeString(oldestLuceneSegment.toString()); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 9d3c346eca9..39a736019f4 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.admin.indices.upgrade.post; -import org.apache.lucene.util.Version; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.PrimaryMissingActionException; import org.elasticsearch.action.ShardOperationFailedException; @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.*; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.IndexShard; @@ -75,7 +76,7 @@ public class TransportUpgradeAction extends TransportBroadcastAction shardFailures = null; Map successfulPrimaryShards = newHashMap(); - Map versions = newHashMap(); + Map> versions = newHashMap(); for (int i = 0; i < shardsResponses.length(); i++) { Object shardResponse = shardsResponses.get(i); if (shardResponse == null) { @@ -94,20 +95,35 @@ public class TransportUpgradeAction extends TransportBroadcastAction versionTuple = versions.get(index); + if (versionTuple == null) { + versions.put(index, new Tuple<>(shardUpgradeResponse.upgradeVersion(), shardUpgradeResponse.oldestLuceneSegment())); + } else { + // We already have versions for this index - let's see if we need to update them based on the current shard + Version version = versionTuple.v1(); + org.apache.lucene.util.Version luceneVersion = versionTuple.v2(); + // For the metadata we are interested in the _latest_ elasticsearch version that was processing the metadata + // Since we rewrite the mapping during upgrade the metadata is always rewritten by the latest version + if (shardUpgradeResponse.upgradeVersion().after(versionTuple.v1())) { + version = shardUpgradeResponse.upgradeVersion(); + } + // For the lucene version we are interested in the _oldest_ lucene version since it determines the + // oldest version that we need to support + if (shardUpgradeResponse.oldestLuceneSegment().onOrAfter(versionTuple.v2()) == false) { + luceneVersion = shardUpgradeResponse.oldestLuceneSegment(); + } + versions.put(index, new Tuple<>(version, luceneVersion)); } } } - Map updatedVersions = newHashMap(); + Map> updatedVersions = newHashMap(); MetaData metaData = clusterState.metaData(); - for (Map.Entry versionEntry : versions.entrySet()) { + for (Map.Entry> versionEntry : versions.entrySet()) { String index = versionEntry.getKey(); Integer primaryCount = successfulPrimaryShards.get(index); int expectedPrimaryCount = metaData.index(index).getNumberOfShards(); if (primaryCount == metaData.index(index).getNumberOfShards()) { - 
updatedVersions.put(index, versionEntry.getValue().toString()); + updatedVersions.put(index, new Tuple<>(versionEntry.getValue().v1(), versionEntry.getValue().v2().toString())); } else { logger.warn("Not updating settings for the index [{}] because the upgrade of some primary shards failed - expected[{}], received[{}]", index, expectedPrimaryCount, primaryCount == null ? 0 : primaryCount); @@ -130,8 +146,9 @@ public class TransportUpgradeAction extends TransportBroadcastAction versions; + private Map> versions; UpgradeResponse() { } - UpgradeResponse(Map versions, int totalShards, int successfulShards, int failedShards, List shardFailures) { + UpgradeResponse(Map> versions, int totalShards, int successfulShards, int failedShards, List shardFailures) { super(totalShards, successfulShards, failedShards, shardFailures); this.versions = versions; } @@ -55,8 +57,9 @@ public class UpgradeResponse extends BroadcastResponse { versions = newHashMap(); for (int i=0; i(upgradeVersion, oldestLuceneSegment)); } } @@ -64,13 +67,18 @@ public class UpgradeResponse extends BroadcastResponse { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(versions.size()); - for(Map.Entry entry : versions.entrySet()) { + for(Map.Entry> entry : versions.entrySet()) { out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + Version.writeVersion(entry.getValue().v1(), out); + out.writeString(entry.getValue().v2()); } } - public Map versions() { + /** + * Returns the highest upgrade version of the node that performed the metadata upgrade and the + * version of the oldest lucene segment for each index that was upgraded. + */ + public Map> versions() { return versions; } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java index 7067f2f61ec..7df42951d08 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsClusterStateUpdateRequest.java @@ -19,7 +19,9 @@ package org.elasticsearch.action.admin.indices.upgrade.post; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; +import org.elasticsearch.common.collect.Tuple; import java.util.Map; @@ -28,7 +30,7 @@ import java.util.Map; */ public class UpgradeSettingsClusterStateUpdateRequest extends ClusterStateUpdateRequest { - private Map versions; + private Map> versions; public UpgradeSettingsClusterStateUpdateRequest() { @@ -37,14 +39,14 @@ public class UpgradeSettingsClusterStateUpdate /** * Returns the index to version map for indices that should be updated */ - public Map versions() { + public Map> versions() { return versions; } /** * Sets the index to version map for indices that should be updated */ - public UpgradeSettingsClusterStateUpdateRequest versions(Map versions) { + public UpgradeSettingsClusterStateUpdateRequest versions(Map> versions) { this.versions = versions; return this; } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java index b191fa53539..7c3676f66e2 100644 ---
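The per-index reduction in TransportUpgradeAction above collapses all shard responses into a single pair: the latest Elasticsearch version that rewrote the metadata and the oldest Lucene segment version that must remain readable. A compact sketch of that fold, with illustrative names; not part of the patch:

    // Fold one shard response into the per-index accumulator.
    static Tuple<Version, org.apache.lucene.util.Version> fold(Tuple<Version, org.apache.lucene.util.Version> acc,
                                                               Version shardUpgradeVersion,
                                                               org.apache.lucene.util.Version shardOldestSegment) {
        // Metadata is rewritten by the newest node, so keep the latest Elasticsearch version ...
        Version es = shardUpgradeVersion.after(acc.v1()) ? shardUpgradeVersion : acc.v1();
        // ... and keep the oldest Lucene segment version, since it bounds what must stay supported.
        org.apache.lucene.util.Version lucene = shardOldestSegment.onOrAfter(acc.v2()) ? acc.v2() : shardOldestSegment;
        return new Tuple<>(es, lucene);
    }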
a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequest.java @@ -19,8 +19,10 @@ package org.elasticsearch.action.admin.indices.upgrade.post; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -35,16 +37,17 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; */ public class UpgradeSettingsRequest extends AcknowledgedRequest { - - private Map versions; + private Map> versions; UpgradeSettingsRequest() { } /** * Constructs a new request to update minimum compatible version settings for one or more indices + * + * @param versions a map from index name to a tuple of (elasticsearch upgrade version, oldest lucene segment version) */ - public UpgradeSettingsRequest(Map versions) { + public UpgradeSettingsRequest(Map> versions) { this.versions = versions; } @@ -59,14 +62,14 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest versions() { + Map> versions() { return versions; } /** * Sets the index versions to be updated */ - public UpgradeSettingsRequest versions(Map versions) { + public UpgradeSettingsRequest versions(Map> versions) { this.versions = versions; return this; } @@ -79,8 +82,9 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest(upgradeVersion, oldestLuceneSegment)); } readTimeout(in); } @@ -89,9 +93,10 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest entry : versions.entrySet()) { + for(Map.Entry> entry : versions.entrySet()) { out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + Version.writeVersion(entry.getValue().v1(), out); + out.writeString(entry.getValue().v2()); } writeTimeout(out); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java index 74c42a5fe80..9ce5aeb2d2d 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsRequestBuilder.java @@ -19,8 +19,10 @@ package org.elasticsearch.action.admin.indices.upgrade.post; +import org.elasticsearch.Version; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.collect.Tuple; import java.util.Map; @@ -36,7 +38,7 @@ public class UpgradeSettingsRequestBuilder extends AcknowledgedRequestBuilder versions) { + public UpgradeSettingsRequestBuilder setVersions(Map> versions) { request.versions(versions); return this; } diff --git a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index b1371e18a32..97620829f3a 100644 --- a/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -211,12 +211,7 @@ public class TransportIndexAction extends TransportReplicationAction pre20HashFunction; private final Boolean
pre20UseType; + private final ScriptService scriptService; @Inject - public MetaDataIndexUpgradeService(Settings settings) { + public MetaDataIndexUpgradeService(Settings settings, ScriptService scriptService) { super(settings); - + this.scriptService = scriptService; final String pre20HashFunctionName = settings.get(DEPRECATED_SETTING_ROUTING_HASH_FUNCTION, null); final boolean hasCustomPre20HashFunction = pre20HashFunctionName != null; // the hash function package has changed we replace the two hash functions if their fully qualified name is used. @@ -83,12 +92,24 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { */ public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { // Throws an exception if there are too-old segments: + if (isUpgraded(indexMetaData)) { + return indexMetaData; + } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = upgradeLegacyRoutingSettings(indexMetaData); newMetaData = addDefaultUnitsIfNeeded(newMetaData); + checkMappingsCompatibility(newMetaData); + newMetaData = markAsUpgraded(newMetaData); return newMetaData; } + /** + * Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks. + */ + private boolean isUpgraded(IndexMetaData indexMetaData) { + return indexMetaData.upgradeVersion().onOrAfter(Version.V_2_0_0_beta1); + } + /** * Elasticsearch 2.0 no longer supports indices with pre Lucene v4.0 (Elasticsearch v 0.90.0) segments. All indices * that were created before Elasticsearch v0.90.0 should be upgraded using upgrade plugin before they can @@ -239,4 +260,66 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { // No changes: return indexMetaData; } + + + /** + * Checks the mappings for compatibility with the current version + */ + private void checkMappingsCompatibility(IndexMetaData indexMetaData) { + Index index = new Index(indexMetaData.getIndex()); + Settings settings = indexMetaData.settings(); + try { + SimilarityLookupService similarityLookupService = new SimilarityLookupService(index, settings); + // We cannot instantiate real analysis server at this point because the node might not have + // been started yet. 
However, we don't really need real analyzers at this stage - so we can fake it + try (AnalysisService analysisService = new FakeAnalysisService(index, settings)) { + try (MapperService mapperService = new MapperService(index, settings, analysisService, similarityLookupService, scriptService)) { + for (ObjectCursor cursor : indexMetaData.getMappings().values()) { + MappingMetaData mappingMetaData = cursor.value; + mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false); + } + } + } + } catch (Exception ex) { + // Wrap the inner exception so we have the index name in the exception message + throw new IllegalStateException("unable to upgrade the mappings for the index [" + indexMetaData.getIndex() + "], reason: [" + ex.getMessage() + "]", ex); + } + } + + /** + * Marks the index as upgraded so we don't have to check it again + */ + private IndexMetaData markAsUpgraded(IndexMetaData indexMetaData) { + Settings settings = Settings.builder().put(indexMetaData.settings()).put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build(); + return IndexMetaData.builder(indexMetaData).settings(settings).build(); + } + + /** + * A fake analysis service that returns the same fake analyzer for all requests - mappings are only + * parsed here, never used to analyze text, so the analyzer fails fast if it is ever asked for a token stream + */ + private static class FakeAnalysisService extends AnalysisService { + + private Analyzer fakeAnalyzer = new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + throw new UnsupportedOperationException("shouldn't be here"); + } + }; + + public FakeAnalysisService(Index index, Settings indexSettings) { + super(index, indexSettings); + } + + @Override + public NamedAnalyzer analyzer(String name) { + return new NamedAnalyzer(name, fakeAnalyzer); + } + + @Override + public void close() { + fakeAnalyzer.close(); + super.close(); + } + } + } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index d23c571faa7..9fbe2c5e06c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -334,7 +335,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements @Override public ClusterState execute(ClusterState currentState) { MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); - for (Map.Entry entry : request.versions().entrySet()) { + for (Map.Entry> entry : request.versions().entrySet()) { String index = entry.getKey(); IndexMetaData indexMetaData = metaDataBuilder.get(index); if (indexMetaData != null) { @@ -342,8 +343,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // No reason to pollute the settings, we didn't really upgrade anything metaDataBuilder.put(IndexMetaData.builder(indexMetaData) .settings(settingsBuilder().put(indexMetaData.settings()) - .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, entry.getValue()) -
.put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT) + .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, entry.getValue().v2()) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, entry.getValue().v1()) ) ); } diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index 72630e6d955..ce36c243035 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.util.Collection; import java.util.Map; /** @@ -48,10 +49,17 @@ public interface BlobContainer { /** * Deletes a blob with giving name. * - * If blob exist but cannot be deleted an exception has to be thrown. + * If a blob exists but cannot be deleted an exception has to be thrown. */ void deleteBlob(String blobName) throws IOException; + /** + * Deletes blobs with the given names. + * + * If a blob exists but cannot be deleted an exception has to be thrown. + */ + void deleteBlobs(Collection blobNames) throws IOException; + /** * Deletes all blobs in the container that match the specified prefix. */ diff --git a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java index 01ad86133aa..44f44f23ffd 100644 --- a/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java +++ b/core/src/main/java/org/elasticsearch/common/blobstore/support/AbstractBlobContainer.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import java.io.IOException; +import java.util.Collection; import java.util.Map; /** @@ -50,4 +51,11 @@ public abstract class AbstractBlobContainer implements BlobContainer { deleteBlob(blob.name()); } } + + @Override + public void deleteBlobs(Collection blobNames) throws IOException { + for(String blob: blobNames) { + deleteBlob(blob); + } + } } diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index b5f224e72f0..166e7960b3c 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.io; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.Callback; import java.io.*; @@ -184,34 +183,6 @@ public abstract class Streams { return out.toString(); } - public static String copyToStringFromClasspath(ClassLoader classLoader, String path) throws IOException { - InputStream is = classLoader.getResourceAsStream(path); - if (is == null) { - throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]"); - } - return copyToString(new InputStreamReader(is, Charsets.UTF_8)); - } - - public static String copyToStringFromClasspath(String path) throws IOException { - InputStream is = Streams.class.getResourceAsStream(path); - if (is == null) { - throw new FileNotFoundException("Resource [" + path + "] not found in
classpath"); - } - return copyToString(new InputStreamReader(is, Charsets.UTF_8)); - } - - public static byte[] copyToBytesFromClasspath(String path) throws IOException { - try (InputStream is = Streams.class.getResourceAsStream(path)) { - if (is == null) { - throw new FileNotFoundException("Resource [" + path + "] not found in classpath"); - } - try (BytesStreamOutput out = new BytesStreamOutput()) { - copy(is, out); - return out.bytes().toBytes(); - } - } - } - public static int readFully(Reader reader, char[] dest) throws IOException { return readFully(reader, dest, 0, dest.length); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 0423f5520b2..7f01a1e98e6 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -63,6 +63,7 @@ import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.script.ScriptService; +import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -78,7 +79,7 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; /** * */ -public class MapperService extends AbstractIndexComponent { +public class MapperService extends AbstractIndexComponent implements Closeable { public static final String DEFAULT_MAPPING = "_default_"; private static ObjectHashSet META_FIELDS = ObjectHashSet.from( diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 6ee6504bf42..c4eecad452c 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1403,9 +1403,11 @@ public class IndexShard extends AbstractIndexShardComponent { * Syncs the given location with the underlying storage unless already synced. 
*/ public void sync(Translog.Location location) { - final Engine engine = engine(); try { + final Engine engine = engine(); engine.getTranslog().ensureSynced(location); + } catch (EngineClosedException ex) { + // that's fine since we already synced everything on engine close - this also conforms with the method's documentation } catch (IOException ex) { // if this fails we are in deep shit - fail the request logger.debug("failed to sync translog", ex); throw new ElasticsearchException("failed to sync translog", ex); diff --git a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java index cce7e44d0f9..042f3c3f5ef 100644 --- a/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java +++ b/core/src/main/java/org/elasticsearch/index/snapshots/blobstore/BlobStoreIndexShardRepository.java @@ -352,33 +352,38 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements */ protected void finalize(List snapshots, int fileListGeneration, Map blobs) { BlobStoreIndexShardSnapshots newSnapshots = new BlobStoreIndexShardSnapshots(snapshots); + List blobsToDelete = newArrayList(); // delete old index files first for (String blobName : blobs.keySet()) { // delete old file lists if (indexShardSnapshotsFormat.isTempBlobName(blobName) || blobName.startsWith(SNAPSHOT_INDEX_PREFIX)) { - try { - blobContainer.deleteBlob(blobName); - } catch (IOException e) { - // We cannot delete index file - this is fatal, we cannot continue, otherwise we might end up - // with references to non-existing files - throw new IndexShardSnapshotFailedException(shardId, "error deleting index file [{}] during cleanup", e); - } + blobsToDelete.add(blobName); } } + try { + blobContainer.deleteBlobs(blobsToDelete); + } catch (IOException e) { + // We cannot delete index file - this is fatal, we cannot continue, otherwise we might end up + // with references to non-existing files + throw new IndexShardSnapshotFailedException(shardId, "error deleting index files during cleanup, reason: " + e.getMessage(), e); + } + + blobsToDelete = newArrayList(); // now go over all the blobs, and if they don't exists in a snapshot, delete them for (String blobName : blobs.keySet()) { // delete unused files if (blobName.startsWith(DATA_BLOB_PREFIX)) { if (newSnapshots.findNameFile(FileInfo.canonicalName(blobName)) == null) { - try { - blobContainer.deleteBlob(blobName); - } catch (IOException e) { - logger.debug("[{}] [{}] error deleting blob [{}] during cleanup", e, snapshotId, shardId, blobName); - } + blobsToDelete.add(blobName); } } } + try { + blobContainer.deleteBlobs(blobsToDelete); + } catch (IOException e) { + logger.debug("[{}] [{}] error deleting some of the blobs [{}] during cleanup", e, snapshotId, shardId, blobsToDelete); + } // If we deleted all snapshots - we don't need to create the index file if (snapshots.size() > 0) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java index a1c9c0b3ed6..6a554db60fe 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java @@ -19,11 +19,13 @@ package
org.elasticsearch.rest.action.admin.indices.upgrade; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -86,8 +88,11 @@ public class RestUpgradeAction extends BaseRestHandler { builder.startObject(); buildBroadcastShardsHeader(builder, request, response); builder.startObject("upgraded_indices"); - for (Map.Entry entry : response.versions().entrySet()) { - builder.field(entry.getKey(), entry.getValue(), XContentBuilder.FieldCaseConversion.NONE); + for (Map.Entry> entry : response.versions().entrySet()) { + builder.startObject(entry.getKey(), XContentBuilder.FieldCaseConversion.NONE); + builder.field("upgrade_version", entry.getValue().v1()); + builder.field("oldest_lucene_segment_version", entry.getValue().v2()); + builder.endObject(); } builder.endObject(); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index c3aa077b63d..3a5ea802b28 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -326,7 +326,7 @@ public class RestIndicesAction extends AbstractCatAction { table.addCell(indexStats == null ? null : indexStats.getPrimaries().getDocs().getDeleted()); table.addCell(indexMetaData.creationDate()); - table.addCell(new DateTime(indexMetaData.creationDate(), DateTimeZone.getDefault())); + table.addCell(new DateTime(indexMetaData.creationDate(), DateTimeZone.UTC)); table.addCell(indexStats == null ? null : indexStats.getTotal().getStore().size()); table.addCell(indexStats == null ? null : indexStats.getPrimaries().getStore().size()); diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 4161578a5a6..3f4891cd825 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -207,6 +207,7 @@ public class VersionTests extends ESTestCase { if (maxBranchVersion == null) { maxBranchVersions.put(branchName, v); } else if (v.after(maxBranchVersion)) { + assertFalse("Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", maxBranchVersion.snapshot()); maxBranchVersions.put(branchName, v); } diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java similarity index 99% rename from core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeIT.java rename to core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java index 82d88aeb950..89d2a25f078 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeIT.java @@ -17,7 +17,7 @@ * under the License. 
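With the RestUpgradeAction change above, each entry under upgraded_indices becomes an object carrying both versions instead of a single version string. A sketch of the new response shape (index name and version values are illustrative only):

    {
      "upgraded_indices": {
        "my-index": {
          "upgrade_version": "2.0.0-beta1",
          "oldest_lucene_segment_version": "5.2.1"
        }
      }
    }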
*/ -package org.elasticsearch.rest.action.admin.indices.upgrade; +package org.elasticsearch.action.admin.indices.upgrade; import com.google.common.base.Predicate; import org.elasticsearch.ExceptionsHelper; diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java similarity index 71% rename from core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java rename to core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java index 68777f107e3..4ada599f7d3 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/upgrade/UpgradeReallyOldIndexIT.java @@ -17,15 +17,17 @@ * under the License. */ -package org.elasticsearch.rest.action.admin.indices.upgrade; +package org.elasticsearch.action.admin.indices.upgrade; import org.elasticsearch.Version; import org.elasticsearch.bwcompat.StaticIndexBackwardCompatibilityIT; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.containsString; public class UpgradeReallyOldIndexIT extends StaticIndexBackwardCompatibilityIT { @@ -38,11 +40,25 @@ public class UpgradeReallyOldIndexIT extends StaticIndexBackwardCompatibilityIT assertTrue(UpgradeIT.hasAncientSegments(client(), indexName)); assertNoFailures(client().admin().indices().prepareUpgrade(indexName).setUpgradeOnlyAncientSegments(true).get()); - assertFalse(UpgradeIT.hasAncientSegments(client(), "index-0.90.6")); + assertFalse(UpgradeIT.hasAncientSegments(client(), indexName)); // This index has only ancient segments, so it should now be fully upgraded: UpgradeIT.assertUpgraded(client(), indexName); assertEquals(Version.CURRENT.luceneVersion.toString(), client().admin().indices().prepareGetSettings(indexName).get().getSetting(indexName, IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE)); assertMinVersion(indexName, Version.CURRENT.luceneVersion); + + assertEquals(client().admin().indices().prepareGetSettings(indexName).get().getSetting(indexName, IndexMetaData.SETTING_VERSION_UPGRADED), Integer.toString(Version.CURRENT.id)); + } + + public void testUpgradeConflictingMapping() throws Exception { + String indexName = "index-conflicting-mappings-1.7.0"; + logger.info("Checking static index " + indexName); + Settings nodeSettings = prepareBackwardsDataDir(getDataPath(indexName + ".zip")); + try { + internalCluster().startNode(nodeSettings); + fail("Should have failed to start the node"); + } catch (Exception ex) { + assertThat(ex.getMessage(), containsString("conflicts with existing mapping in other types")); + } } private void assertMinVersion(String index, org.apache.lucene.util.Version version) { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index fe5c0821330..ce2df6b3543 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -26,7 +26,7 @@ import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; public class BulkIntegrationIT extends ESIntegTestCase { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 61361d48fb0..3d462fd17ee 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -37,7 +37,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTest.java b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTest.java index e2c98ce65a6..dd562a9bccd 100644 --- a/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTest.java +++ b/core/src/test/java/org/elasticsearch/action/fieldstats/FieldStatsRequestTest.java @@ -20,8 +20,8 @@ package org.elasticsearch.action.fieldstats; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.StreamsUtils; import static org.elasticsearch.action.fieldstats.IndexConstraint.Comparison.*; import static org.elasticsearch.action.fieldstats.IndexConstraint.Property.MAX; @@ -31,7 +31,7 @@ import static org.hamcrest.Matchers.equalTo; public class FieldStatsRequestTest extends ESTestCase { public void testFieldsParsing() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/fieldstats-index-constraints-request.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/fieldstats/fieldstats-index-constraints-request.json"); FieldStatsRequest request = new FieldStatsRequest(); request.source(new BytesArray(data)); diff --git a/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java b/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java index 2f2a0728c67..48c75d8267b 100644 --- a/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/percolate/MultiPercolatorRequestTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.percolate; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; import org.junit.Test; @@ -35,7 +35,7 @@ public class MultiPercolatorRequestTests extends ESTestCase { @Test public void testParseBulkRequests() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate1.json"); + byte[] data = 
StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate1.json"); MultiPercolateRequest request = new MultiPercolateRequest().add(data, 0, data.length); assertThat(request.requests().size(), equalTo(8)); @@ -152,7 +152,7 @@ public class MultiPercolatorRequestTests extends ESTestCase { @Test public void testParseBulkRequests_defaults() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate2.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/percolate/mpercolate2.json"); MultiPercolateRequest request = new MultiPercolateRequest(); request.indices("my-index1").documentType("my-type1").indicesOptions(IndicesOptions.lenientExpandOpen()); request.add(data, 0, data.length); diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 05f7d522346..97aee6ea3d6 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -38,7 +38,7 @@ public class MultiSearchRequestTests extends ESTestCase { @Test public void simpleAdd() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch1.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch1.json"); MultiSearchRequest request = new MultiSearchRequest().add(data, 0, data.length, null, null, null); assertThat(request.requests().size(), equalTo(8)); assertThat(request.requests().get(0).indices()[0], equalTo("test")); @@ -64,7 +64,7 @@ public class MultiSearchRequestTests extends ESTestCase { @Test public void simpleAdd2() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch2.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch2.json"); MultiSearchRequest request = new MultiSearchRequest().add(data, 0, data.length, null, null, null); assertThat(request.requests().size(), equalTo(5)); assertThat(request.requests().get(0).indices()[0], equalTo("test")); @@ -82,7 +82,7 @@ public class MultiSearchRequestTests extends ESTestCase { @Test public void simpleAdd3() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch3.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch3.json"); MultiSearchRequest request = new MultiSearchRequest().add(data, 0, data.length, null, null, null); assertThat(request.requests().size(), equalTo(4)); assertThat(request.requests().get(0).indices()[0], equalTo("test0")); @@ -101,7 +101,7 @@ public class MultiSearchRequestTests extends ESTestCase { @Test public void simpleAdd4() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch4.json"); + byte[] data = 
StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch4.json"); MultiSearchRequest request = new MultiSearchRequest().add(data, 0, data.length, null, null, null); assertThat(request.requests().size(), equalTo(3)); assertThat(request.requests().get(0).indices()[0], equalTo("test0")); diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java index 13d923a7901..82809d1c5cd 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java @@ -31,7 +31,6 @@ import org.apache.lucene.store.Directory; import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.xcontent.XContentFactory; @@ -43,6 +42,7 @@ import org.elasticsearch.index.mapper.core.TypeParsers; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.StreamsUtils; import org.hamcrest.Matchers; import org.junit.Test; @@ -292,13 +292,13 @@ public class TermVectorsUnitTests extends ESTestCase { @Test public void testMultiParser() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest1.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest1.json"); BytesReference bytes = new BytesArray(data); MultiTermVectorsRequest request = new MultiTermVectorsRequest(); request.add(new TermVectorsRequest(), bytes); checkParsedParameters(request); - data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest2.json"); + data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest2.json"); bytes = new BytesArray(data); request = new MultiTermVectorsRequest(); request.add(new TermVectorsRequest(), bytes); @@ -328,7 +328,7 @@ public class TermVectorsUnitTests extends ESTestCase { @Test // issue #12311 public void testMultiParserFilter() throws Exception { - byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest3.json"); + byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest3.json"); BytesReference bytes = new BytesArray(data); MultiTermVectorsRequest request = new MultiTermVectorsRequest(); request.add(new TermVectorsRequest(), bytes); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index c2ba057f9e9..db464c31516 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -26,6 +26,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import 
org.elasticsearch.action.admin.indices.upgrade.UpgradeIT; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -42,7 +43,6 @@ import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.MergePolicyConfig; import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.rest.action.admin.indices.upgrade.UpgradeIT; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; diff --git a/core/src/test/java/org/elasticsearch/common/cli/CliToolTestCase.java b/core/src/test/java/org/elasticsearch/common/cli/CliToolTestCase.java index fdfee87dfcc..b30788cf702 100644 --- a/core/src/test/java/org/elasticsearch/common/cli/CliToolTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/cli/CliToolTestCase.java @@ -21,11 +21,10 @@ package org.elasticsearch.common.cli; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.StreamsUtils; import org.junit.After; import org.junit.Before; -import org.junit.Ignore; import java.io.IOException; import java.io.PrintWriter; @@ -161,7 +160,7 @@ public abstract class CliToolTestCase extends ESTestCase { } assertThat(nonEmptyLines, hasSize(greaterThan(0))); - String expectedDocs = Streams.copyToStringFromClasspath(classPath); + String expectedDocs = StreamsUtils.copyToStringFromClasspath(classPath); for (String nonEmptyLine : nonEmptyLines) { assertThat(expectedDocs, containsString(nonEmptyLine.replaceAll(System.lineSeparator(), ""))); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index e80dcec3e97..b50f2da849c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -56,15 +56,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class SimpleAllMapperTests extends ESSingleNodeTestCase { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java index 83a14687b32..76383408ed8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/genericstore/GenericStoreDynamicTemplateTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Test; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java index b4c1ba668f4..34c855f4f2e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/PathMatchDynamicTemplateTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Test; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java index c136f4d989c..09358b5280c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/dynamictemplate/simple/SimpleDynamicTemplatesTests.java @@ -31,8 +31,8 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.hamcrest.Matchers; import org.junit.Test; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.equalTo; /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 8511bf96c08..dc14ebecd5e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -47,8 +47,8 @@ import java.util.Collections; import java.util.Map; import java.util.TreeMap; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static 
org.elasticsearch.index.mapper.MapperBuilders.doc; import static org.elasticsearch.index.mapper.MapperBuilders.rootObject; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 5405aee6ec8..655069ed9e2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -32,7 +32,7 @@ import org.junit.Test; import java.util.Arrays; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.*; /** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java index 4bd12550d89..06ef922e94d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/path/PathMapperTests.java @@ -25,7 +25,7 @@ import org.junit.Test; import java.io.IOException; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java index 5c4bd28a028..a08eacce647 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java @@ -30,8 +30,8 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Test; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.index.mapper.MapperBuilders.*; import static org.hamcrest.Matchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java index 7478b79bf7b..4d4d06bd292 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterIT.java @@ -30,7 +30,7 @@ import org.junit.Test; import java.util.HashMap; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index a45d4a34a13..4b136a78b65 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -36,7 +36,7 @@ import org.junit.Test; import java.io.IOException; import java.util.LinkedHashMap; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java index 4a60a2da464..8b4c40585c4 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeFormatTests.java @@ -38,8 +38,8 @@ import org.junit.Test; import java.io.IOException; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; diff --git a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java index 5c7b122096a..d581aa6ae77 100644 --- a/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/IndexQueryParserFilterDateRangeTimezoneTests.java @@ -38,8 +38,8 @@ import org.junit.Test; import java.io.IOException; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 4be37c8c8aa..f441aa298ad 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -73,8 +73,8 @@ import java.util.Arrays; import java.util.EnumSet; import java.util.List; -import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DedicatedAggregationIT.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DedicatedAggregationIT.java index 941cf238d5a..0f22bb42d5f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DedicatedAggregationIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DedicatedAggregationIT.java @@ -26,7 +26,7 @@ import org.junit.Test; import java.io.IOException; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.CoreMatchers.equalTo; diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 5f89af38660..4a1eed7a457 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -55,7 +55,7 @@ import java.io.IOException; import java.util.*; import static com.google.common.collect.Maps.newHashMap; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.*; diff --git a/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java b/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java index 91a1731feda..ab06a3d5d51 100644 --- a/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/search/morelikethis/ItemSerializationTests.java @@ -37,7 +37,7 @@ import java.io.IOException; import java.util.List; import java.util.Random; -import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; +import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.is; public class ItemSerializationTests extends ESTestCase { diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java index 6ab2cdf782b..e45d4169995 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.util.Collection; import java.util.Map; /** @@ -63,6 +64,11 @@ public class BlobContainerWrapper implements BlobContainer { delegate.deleteBlob(blobName); } + @Override + public void deleteBlobs(Collection<String> blobNames) throws IOException { + delegate.deleteBlobs(blobNames); + } + @Override public void deleteBlobsByPrefix(String blobNamePrefix) throws IOException { delegate.deleteBlobsByPrefix(blobNamePrefix); diff --git a/core/src/test/java/org/elasticsearch/test/StreamsUtils.java b/core/src/test/java/org/elasticsearch/test/StreamsUtils.java new file mode 100644 index 00000000000..ffc9537c3ee --- /dev/null +++
b/core/src/test/java/org/elasticsearch/test/StreamsUtils.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import com.google.common.base.Charsets; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.BytesStreamOutput; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; + +public class StreamsUtils { + + public static String copyToStringFromClasspath(ClassLoader classLoader, String path) throws IOException { + InputStream is = classLoader.getResourceAsStream(path); + if (is == null) { + throw new FileNotFoundException("Resource [" + path + "] not found in classpath with class loader [" + classLoader + "]"); + } + return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8)); + } + + public static String copyToStringFromClasspath(String path) throws IOException { + InputStream is = Streams.class.getResourceAsStream(path); + if (is == null) { + throw new FileNotFoundException("Resource [" + path + "] not found in classpath"); + } + return Streams.copyToString(new InputStreamReader(is, Charsets.UTF_8)); + } + + public static byte[] copyToBytesFromClasspath(String path) throws IOException { + try (InputStream is = Streams.class.getResourceAsStream(path)) { + if (is == null) { + throw new FileNotFoundException("Resource [" + path + "] not found in classpath"); + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + Streams.copy(is, out); + return out.bytes().toBytes(); + } + } + } + +} diff --git a/core/src/test/resources/org/elasticsearch/rest/action/admin/indices/upgrade/index-0.90.6.zip b/core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade/index-0.90.6.zip similarity index 100% rename from core/src/test/resources/org/elasticsearch/rest/action/admin/indices/upgrade/index-0.90.6.zip rename to core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade/index-0.90.6.zip diff --git a/core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade/index-conflicting-mappings-1.7.0.zip b/core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade/index-conflicting-mappings-1.7.0.zip new file mode 100644 index 00000000000..559aeb78e7e Binary files /dev/null and b/core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade/index-conflicting-mappings-1.7.0.zip differ diff --git a/dev-tools/build_release.py b/dev-tools/build_release.py index 483076a6118..5d41aef90bc 100644 --- a/dev-tools/build_release.py +++ b/dev-tools/build_release.py @@ -251,27 +251,20 @@ def build_release(release_version, run_tests=False, dry_run=True, cpus=1, bwc_ve print('Running Backwards compatibility tests against version [%s]' % 
(bwc_version)) run_mvn('clean', 'test -Dtests.filter=@backwards -Dtests.bwc.version=%s -Dtests.bwc=true -Dtests.jvms=1' % bwc_version) run_mvn('clean test-compile -Dforbidden.test.signatures="org.apache.lucene.util.LuceneTestCase\$AwaitsFix @ Please fix all bugs before release"') - gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true -Drpm.sign=true' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE')) + # don't sign the RPM, so older distros will be able to use the uploaded RPM package + gpg_args = '-Dgpg.key="%s" -Dgpg.passphrase="%s" -Ddeb.sign=true -Drpm.sign=false' % (env.get('GPG_KEY_ID'), env.get('GPG_PASSPHRASE')) if env.get('GPG_KEYRING'): gpg_args += ' -Dgpg.keyring="%s"' % env.get('GPG_KEYRING') run_mvn('clean %s -DskipTests %s' % (target, gpg_args)) success = False try: - # create unsigned RPM first for downloads.elasticsearch.org - run_mvn('-DskipTests rpm:rpm') - # move unsigned RPM to target/releases - # this is an oddness of RPM that is attaches -1 so we have to rename it - rpm = os.path.join('target/rpm/elasticsearch/RPMS/noarch/', 'elasticsearch-%s-1.noarch.rpm' % release_version) + # create additional signed RPM for the repositories + run_mvn('-f distribution/rpm/pom.xml package -DskipTests -Dsign.rpm=true -Drpm.outputDirectory=target/releases/signed/ %s' % (gpg_args)) + rpm = os.path.join('target/releases/signed', 'elasticsearch-%s.rpm' % release_version) if os.path.isfile(rpm): - log('RPM [%s] contains: ' % rpm) + log('Signed RPM [%s] contains: ' % rpm) run('rpm -pqli %s' % rpm) - renamed_rpm = os.path.join('target/releases/', 'elasticsearch-%s.noarch.rpm' % release_version) - shutil.move(rpm, renamed_rpm) - else: - raise RuntimeError('Could not find required RPM at %s' % rpm) - # now create signed RPM for repositories - run_mvn('-DskipTests rpm:rpm %s' % (gpg_args)) - success = True + success = True finally: if not success: print(""" @@ -358,63 +351,44 @@ def find_release_version(src_branch): return match.group(1) raise RuntimeError('Could not find release version in branch %s' % src_branch) -def artifact_names(release, path = ''): - artifacts = [os.path.join(path, 'elasticsearch-%s.%s' % (release, t)) for t in ['deb', 'tar.gz', 'zip']] - artifacts.append(os.path.join(path, 'elasticsearch-%s.noarch.rpm' % (release))) +def artifact_names(release): + artifacts = [] + artifacts.append(os.path.join('distribution/zip/target/releases', 'elasticsearch-%s.zip' % (release))) + artifacts.append(os.path.join('distribution/tar/target/releases', 'elasticsearch-%s.tar.gz' % (release))) + artifacts.append(os.path.join('distribution/deb/target/releases', 'elasticsearch-%s.deb' % (release))) + artifacts.append(os.path.join('distribution/rpm/target/releases', 'elasticsearch-%s.rpm' % (release))) return artifacts def get_artifacts(release): - common_artifacts = artifact_names(release, 'target/releases/') + common_artifacts = artifact_names(release) for f in common_artifacts: if not os.path.isfile(f): raise RuntimeError('Could not find required artifact at %s' % f) return common_artifacts -# Checks the jar files in each package -# Barfs if any of the package jar files differ -def check_artifacts_for_same_jars(artifacts): - jars = [] - for file in artifacts: - if file.endswith('.zip'): - jars.append(subprocess.check_output("unzip -l %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) - if file.endswith('.tar.gz'): - jars.append(subprocess.check_output("tar tzvf %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) - if
file.endswith('.rpm'): - jars.append(subprocess.check_output("rpm -pqli %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) - if file.endswith('.deb'): - jars.append(subprocess.check_output("dpkg -c %s | grep '\.jar$' | awk -F '/' '{ print $NF }' | sort" % file, shell=True)) - if len(set(jars)) != 1: - raise RuntimeError('JAR contents of packages are not the same, please check the package contents. Use [unzip -l], [tar tzvf], [dpkg -c], [rpm -pqli] to inspect') - -# Generates sha1 checsums for all files -# and returns the checksum files as well -# as the given files in a list -def generate_checksums(files): - res = [] - for release_file in files: - directory = os.path.dirname(release_file) - file = os.path.basename(release_file) - checksum_file = '%s.sha1.txt' % file - - if os.system('cd %s; shasum %s > %s' % (directory, file, checksum_file)): - raise RuntimeError('Failed to generate checksum for file %s' % release_file) - res = res + [os.path.join(directory, checksum_file), release_file] - return res - -def download_and_verify(release, files, plugins=None, base_url='https://download.elastic.co/elasticsearch/elasticsearch'): +# Sample URL: +# http://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/elasticsearch-rpm/2.0.0-beta1-SNAPSHOT/elasticsearch-rpm-2.0.0-beta1-SNAPSHOT.rpm +def download_and_verify(release, files, plugins=None, base_url='https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution'): print('Downloading and verifying release %s from %s' % (release, base_url)) tmp_dir = tempfile.mkdtemp() try: downloaded_files = [] for file in files: name = os.path.basename(file) - url = '%s/%s' % (base_url, name) + if name.endswith('tar.gz'): + url = '%s/tar/elasticsearch/%s/%s' % (base_url, release, name) + elif name.endswith('zip'): + url = '%s/zip/elasticsearch/%s/%s' % (base_url, release, name) + elif name.endswith('rpm'): + url = '%s/rpm/elasticsearch/%s/%s' % (base_url, release, name) + elif name.endswith('deb'): + url = '%s/deb/elasticsearch/%s/%s' % (base_url, release, name) abs_file_path = os.path.join(tmp_dir, name) print(' Downloading %s' % (url)) downloaded_files.append(abs_file_path) urllib.request.urlretrieve(url, abs_file_path) - url = ''.join([url, '.sha1.txt']) - checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1.txt'])) + url = ''.join([url, '.sha1']) + checksum_file = os.path.join(tmp_dir, ''.join([abs_file_path, '.sha1'])) urllib.request.urlretrieve(url, checksum_file) print(' Verifying checksum %s' % (checksum_file)) run('cd %s && sha1sum -c %s' % (tmp_dir, os.path.basename(checksum_file))) @@ -444,10 +418,7 @@ def smoke_test_release(release, files, expected_hash, plugins): run('%s; %s install %s' % (java_exe(), es_plugin_path, plugin)) plugin_names[name] = True - if release.startswith("0.90."): - background = '' # 0.90.x starts in background automatically - else: - background = '-d' + background = '-d' print(' Starting elasticsearch daemon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release)) run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.discovery.zen.ping.multicast.enabled=false -Des.script.inline=on -Des.script.indexed=on %s' % (java_exe(), es_run_path, background)) @@ -505,21 +476,11 @@ def merge_tag_push(remote, src_branch, release_version, dry_run): else: print(' dryrun [True] -- skipping push to remote %s' % remote) -def publish_artifacts(artifacts, base='elasticsearch/elasticsearch', dry_run=True): - location =
os.path.dirname(os.path.realpath(__file__)) - for artifact in artifacts: - if dry_run: - print('Skip Uploading %s to Amazon S3' % artifact) - else: - print('Uploading %s to Amazon S3' % artifact) - # requires boto to be installed but it is not available on python3k yet so we use a dedicated tool - run('python %s/upload-s3.py --file %s ' % (location, os.path.abspath(artifact))) - def publish_repositories(version, dry_run=True): if dry_run: print('Skipping package repository update') else: - print('Triggering repository update - calling dev-tools/build_repositories.sh %s' % version) + print('Triggering repository update for version %s - calling dev-tools/build_repositories.sh %s' % (version, src_branch)) # src_branch is a version like 1.5/1.6/2.0/etc.. so we can use this run('dev-tools/build_repositories.sh %s' % src_branch) @@ -756,22 +717,17 @@ if __name__ == '__main__': print('Building Release candidate') input('Press Enter to continue...') if not dry_run: - print(' Running maven builds now and publish to Sonatype - run-tests [%s]' % run_tests) + print(' Running maven builds now and publish to Sonatype and S3 - run-tests [%s]' % run_tests) else: print(' Running maven builds now run-tests [%s]' % run_tests) build_release(release_version, run_tests=run_tests, dry_run=dry_run, cpus=cpus, bwc_version=find_bwc_version(release_version, bwc_path)) artifacts = get_artifacts(release_version) - print('Checking if all artifacts contain the same jars') - check_artifacts_for_same_jars(artifacts) - artifacts_and_checksum = generate_checksums(artifacts) smoke_test_release(release_version, artifacts, get_head_hash(), PLUGINS) print(''.join(['-' for _ in range(80)])) print('Finish Release -- dry_run: %s' % dry_run) input('Press Enter to continue...') print(' merge release branch, tag and push to %s %s -- dry_run: %s' % (remote, src_branch, dry_run)) merge_tag_push(remote, src_branch, release_version, dry_run) - print(' publish artifacts to S3 -- dry_run: %s' % dry_run) - publish_artifacts(artifacts_and_checksum, dry_run=dry_run) print(' Updating package repositories -- dry_run: %s' % dry_run) publish_repositories(src_branch, dry_run=dry_run) cherry_pick_command = '.' 
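The reworked download_and_verify above resolves one URL per package type, following the Maven repository layout of the new distribution/ sub-projects, and fetches a bare .sha1 checksum next to each artifact instead of the old .sha1.txt. A minimal sketch of that URL scheme, for reference only (the helpers artifact_url and checksum_url are illustrative and not functions in build_release.py):

def artifact_url(release, name, base_url='https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution'):
    # Each package type lives under its own sub-project directory:
    # <base>/<type>/elasticsearch/<version>/<artifact>
    for suffix, kind in (('.tar.gz', 'tar'), ('.zip', 'zip'), ('.rpm', 'rpm'), ('.deb', 'deb')):
        if name.endswith(suffix):
            return '%s/%s/elasticsearch/%s/%s' % (base_url, kind, release, name)
    raise ValueError('unknown artifact type: %s' % name)

def checksum_url(release, name):
    # the checksum file sits next to the artifact with a plain '.sha1' suffix
    return artifact_url(release, name) + '.sha1'

So artifact_url('2.0.0-beta1', 'elasticsearch-2.0.0-beta1.rpm') would yield .../distribution/rpm/elasticsearch/2.0.0-beta1/elasticsearch-2.0.0-beta1.rpm, with its checksum at the same path plus .sha1.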
diff --git a/dev-tools/build_repositories.sh b/dev-tools/build_repositories.sh index 9bad8ff2c14..d00f6c2e2ad 100755 --- a/dev-tools/build_repositories.sh +++ b/dev-tools/build_repositories.sh @@ -158,8 +158,8 @@ mkdir -p $centosdir echo "RPM: Syncing repository for version $version into $centosdir" $s3cmd sync s3://$S3_BUCKET_SYNC_FROM/elasticsearch/$version/centos/ $centosdir -rpm=target/rpm/elasticsearch/RPMS/noarch/elasticsearch*.rpm -echo "RPM: Copying $rpm into $centosdor" +rpm=distribution/rpm/target/releases/signed/elasticsearch*.rpm +echo "RPM: Copying signed $rpm into $centosdir" cp $rpm $centosdir echo "RPM: Running createrepo in $centosdir" @@ -176,7 +176,7 @@ $s3cmd sync -P $centosdir/ s3://$S3_BUCKET_SYNC_TO/elasticsearch/$version/centos ## DEB ################### -deb=target/releases/elasticsearch*.deb +deb=distribution/deb/target/releases/elasticsearch*.deb echo "DEB: Creating repository directory structure" diff --git a/dev-tools/create_bwc_index_with_conficting_mappings.py b/dev-tools/create_bwc_index_with_conficting_mappings.py new file mode 100644 index 00000000000..bc82e03dd8d --- /dev/null +++ b/dev-tools/create_bwc_index_with_conficting_mappings.py @@ -0,0 +1,93 @@ +import create_bwc_index +import logging +import os +import random +import shutil +import subprocess +import sys +import tempfile + +def fetch_version(version): + logging.info('fetching ES version %s' % version) + if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0: + raise RuntimeError('failed to download ES version %s' % version) + +def main(): + ''' + Creates a static back compat index (.zip) with conflicting mappings. + ''' + + logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, + datefmt='%Y-%m-%d %I:%M:%S %p') + logging.getLogger('elasticsearch').setLevel(logging.ERROR) + logging.getLogger('urllib3').setLevel(logging.WARN) + + tmp_dir = tempfile.mkdtemp() + try: + data_dir = os.path.join(tmp_dir, 'data') + repo_dir = os.path.join(tmp_dir, 'repo') + logging.info('Temp data dir: %s' % data_dir) + logging.info('Temp repo dir: %s' % repo_dir) + + version = '1.7.0' + classifier = 'conflicting-mappings-%s' % version + index_name = 'index-%s' % classifier + + # Download old ES releases if necessary: + release_dir = os.path.join('backwards', 'elasticsearch-%s' % version) + if not os.path.exists(release_dir): + fetch_version(version) + + node = create_bwc_index.start_node(version, release_dir, data_dir, repo_dir, cluster_name=index_name) + client = create_bwc_index.create_client() + + put_conflicting_mappings(client, index_name) + create_bwc_index.shutdown_node(node) + print('%s server output:\n%s' % (version, node.stdout.read().decode('utf-8'))) + node = None + create_bwc_index.compress_index(classifier, tmp_dir, 'core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade') + finally: + if node is not None: + create_bwc_index.shutdown_node(node) + shutil.rmtree(tmp_dir) + +def put_conflicting_mappings(client, index_name): + client.indices.delete(index=index_name, ignore=404) + logging.info('Create single shard test index') + + mappings = {} + # backwardcompat test for conflicting mappings, see #11857 + mappings['x'] = { + 'analyzer': 'standard', + "properties": { + "foo": { + "type": "string" + } + } + } + mappings['y'] = { + 'analyzer': 'standard', + "properties": { + "foo": { + "type": "date" + } + } + } + + client.indices.create(index=index_name, body={ + 'settings': { + 
'number_of_shards': 1, + 'number_of_replicas': 0 + }, + 'mappings': mappings + }) + health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) + assert health['timed_out'] == False, 'cluster health timed out %s' % health + num_docs = random.randint(2000, 3000) + create_bwc_index.index_documents(client, index_name, 'doc', num_docs) + logging.info('Running basic asserts on the data added') + create_bwc_index.run_basic_asserts(client, index_name, 'doc', num_docs) + +if __name__ == '__main__': + main() + diff --git a/dev-tools/create_bwc_index_with_some_ancient_segments.py b/dev-tools/create_bwc_index_with_some_ancient_segments.py index d1162d4690f..573683a1cbb 100644 --- a/dev-tools/create_bwc_index_with_some_ancient_segments.py +++ b/dev-tools/create_bwc_index_with_some_ancient_segments.py @@ -25,7 +25,9 @@ def main(): tmp_dir = tempfile.mkdtemp() try: data_dir = os.path.join(tmp_dir, 'data') + repo_dir = os.path.join(tmp_dir, 'repo') logging.info('Temp data dir: %s' % data_dir) + logging.info('Temp repo dir: %s' % repo_dir) first_version = '0.20.6' second_version = '0.90.6' @@ -36,7 +38,7 @@ def main(): if not os.path.exists(release_dir): fetch_version(first_version) - node = create_bwc_index.start_node(first_version, release_dir, data_dir, cluster_name=index_name) + node = create_bwc_index.start_node(first_version, release_dir, data_dir, repo_dir, cluster_name=index_name) client = create_bwc_index.create_client() # Creates the index & indexes docs w/ first_version: @@ -63,7 +65,7 @@ def main(): fetch_version(second_version) # Now also index docs with second_version: - node = create_bwc_index.start_node(second_version, release_dir, data_dir, cluster_name=index_name) + node = create_bwc_index.start_node(second_version, release_dir, data_dir, repo_dir, cluster_name=index_name) client = create_bwc_index.create_client() # If we index too many docs, the random refresh/flush causes the ancient segments to be merged away: @@ -102,7 +104,7 @@ def main(): create_bwc_index.shutdown_node(node) print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8'))) node = None - create_bwc_index.compress_index('%s-and-%s' % (first_version, second_version), tmp_dir, 'src/test/resources/org/elasticsearch/rest/action/admin/indices/upgrade') + create_bwc_index.compress_index('%s-and-%s' % (first_version, second_version), tmp_dir, 'core/src/test/resources/org/elasticsearch/action/admin/indices/upgrade') finally: if node is not None: create_bwc_index.shutdown_node(node) diff --git a/dev-tools/src/main/resources/ant/integration-tests.xml b/dev-tools/src/main/resources/ant/integration-tests.xml index fa454983130..4cd6b4728f8 100644 --- a/dev-tools/src/main/resources/ant/integration-tests.xml +++ b/dev-tools/src/main/resources/ant/integration-tests.xml @@ -12,10 +12,14 @@ + + + @@ -119,12 +123,24 @@ Starting up external cluster... + args="@{args} -Des.path.repo=@{home}/repo"/> - - + + + + + + + + + + External cluster started PID ${integ.pid} @@ -143,9 +159,9 @@ - + - + @@ -185,7 +201,7 @@ - @@ -200,7 +216,7 @@ - @@ -217,7 +233,7 @@ - + @@ -244,7 +260,7 @@ - + diff --git a/dev-tools/src/main/resources/forbidden/all-signatures.txt b/dev-tools/src/main/resources/forbidden/all-signatures.txt index d33c4ee9b8b..b03cd14731f 100644 --- a/dev-tools/src/main/resources/forbidden/all-signatures.txt +++ b/dev-tools/src/main/resources/forbidden/all-signatures.txt @@ -46,13 +46,14 @@ java.nio.file.FileSystems#getDefault() @ use PathUtils.getDefault instead. 
java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[]) java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[]) -@defaultMessage Constructing a DateTime without a time zone is dangerous - use DateTime(DateTimeZone.getDefault()) if you really want the default timezone +@defaultMessage Constructing a DateTime without a time zone is dangerous org.joda.time.DateTime#<init>() org.joda.time.DateTime#<init>(long) org.joda.time.DateTime#<init>(int, int, int, int, int) org.joda.time.DateTime#<init>(int, int, int, int, int, int) org.joda.time.DateTime#<init>(int, int, int, int, int, int, int) org.joda.time.DateTime#now() +org.joda.time.DateTimeZone#getDefault() com.google.common.collect.Iterators#emptyIterator() @ Use Collections.emptyIterator instead diff --git a/dev-tools/src/main/resources/license-check/check_license_and_sha.pl b/dev-tools/src/main/resources/license-check/check_license_and_sha.pl index 9648a1f21a8..5af5b6b18ed 100755 --- a/dev-tools/src/main/resources/license-check/check_license_and_sha.pl +++ b/dev-tools/src/main/resources/license-check/check_license_and_sha.pl @@ -2,41 +2,33 @@ use strict; use warnings; -use v5.10; use FindBin qw($RealBin); use lib "$RealBin/lib"; -use Archive::Ar(); -use Cwd(); use File::Spec(); -use Digest::SHA qw(sha1); use File::Temp(); +use File::Find(); +use Digest::SHA qw(sha1); use File::Basename qw(basename); use Archive::Extract(); $Archive::Extract::PREFER_BIN = 1; -our %Extract_Package = ( - zip => \&extract_zip, - gz => \&extract_tar_gz, - rpm => \&extract_rpm, - deb => \&extract_deb -); - my $mode = shift(@ARGV) || ""; die usage() unless $mode =~ /^--(check|update)$/; my $License_Dir = shift(@ARGV) || die usage(); -my $Package = shift(@ARGV) || die usage(); +my $Source = shift(@ARGV) || die usage(); $License_Dir = File::Spec->rel2abs($License_Dir) . '/'; -$Package = File::Spec->rel2abs($Package); +$Source = File::Spec->rel2abs($Source); die "License dir is not a directory: $License_Dir\n" . usage() unless -d $License_Dir; -die "Package is not a file: $Package\n" . usage() - unless -f $Package; +my %shas + = -f $Source ? jars_from_zip($Source) + : -d $Source ? jars_from_dir($Source) + : die "Source is neither a directory nor a zip file: $Source" . usage(); -my %shas = get_shas_from_package($Package); $mode eq '--check' ? exit check_shas_and_licenses(%shas) : exit write_shas(%shas); @@ -56,15 +48,15 @@ sub check_shas_and_licenses { for my $jar ( sort keys %new ) { my $old_sha = delete $old{$jar}; unless ($old_sha) { - say STDERR "$jar: SHA is missing"; + print STDERR "$jar: SHA is missing\n"; $error++; $sha_error++; next; } unless ( $old_sha eq $new{$jar} ) { - say STDERR - "$jar: SHA has changed, expected $old_sha but found $new{$jar}"; + print STDERR + "$jar: SHA has changed, expected $old_sha but found $new{$jar}\n"; $error++; $sha_error++; next; @@ -92,43 +84,49 @@ sub check_shas_and_licenses { } } unless ($license_found) { - say STDERR "$jar: LICENSE is missing"; + print STDERR "$jar: LICENSE is missing\n"; $error++; $sha_error++; } unless ($notice_found) { - say STDERR "$jar: NOTICE is missing"; + print STDERR "$jar: NOTICE is missing\n"; $error++; } } if ( keys %old ) { - say STDERR "Extra SHA files present for: " . join ", ", sort keys %old; + print STDERR "Extra SHA files present for: " . join ", ", + sort keys %old; + print "\n"; $error++; } my @unused_licenses = grep { !$licenses{$_} } keys %licenses; if (@unused_licenses) { - say STDERR "Extra LICENCE file present: " .
join ", ", + $error++; + print STDERR "Extra LICENCE file present: " . join ", ", sort @unused_licenses; + print "\n"; } my @unused_notices = grep { !$notices{$_} } keys %notices; if (@unused_notices) { - say STDERR "Extra NOTICE file present: " . join ", ", + $error++; + print STDERR "Extra NOTICE file present: " . join ", ", sort @unused_notices; + print "\n"; } if ($sha_error) { - say STDERR <<"SHAS" + print STDERR <<"SHAS" You can update the SHA files by running: -$0 --update $License_Dir $Package +$0 --update $License_Dir $Source SHAS } - say "All SHAs and licenses OK" unless $error; + print("All SHAs and licenses OK\n") unless $error; return $error; } @@ -141,13 +139,13 @@ sub write_shas { for my $jar ( sort keys %new ) { if ( $old{$jar} ) { next if $old{$jar} eq $new{$jar}; - say "Updating $jar"; + print "Updating $jar\n"; } else { - say "Adding $jar"; + print "Adding $jar\n"; } open my $fh, '>', $License_Dir . $jar or die $!; - say $fh $new{$jar} or die $!; + print $fh $new{$jar} . "\n" or die $!; close $fh or die $!; } continue { @@ -155,10 +153,10 @@ sub write_shas { } for my $jar ( sort keys %old ) { - say "Deleting $jar"; + print "Deleting $jar\n"; unlink $License_Dir . $jar or die $!; } - say "SHAs updated"; + print "SHAs updated\n"; return 0; } @@ -194,82 +192,37 @@ sub get_sha_files { } #=================================== -sub get_shas_from_package { +sub jars_from_zip { #=================================== - my $package = shift; - my ($type) = ( $package =~ /\.(\w+)$/ ); - die "Unrecognised package type: $package" - unless $type && $Extract_Package{$type}; - + my ($source) = @_; my $temp_dir = File::Temp->newdir; - my $files - = eval { $Extract_Package{$type}->( $package, $temp_dir->dirname ) } - or die "Couldn't extract $package: $@"; - - my @jars = map {"$temp_dir/$_"} - grep { /\.jar$/ && !/elasticsearch[^\/]*$/ } @$files; + my $dir_name = $temp_dir->dirname; + my $archive = Archive::Extract->new( archive => $source, type => 'zip' ); + $archive->extract( to => $dir_name ) || die $archive->error; + my @jars = map { File::Spec->rel2abs( $_, $dir_name ) } + grep { /\.jar$/ && !/elasticsearch[^\/]*$/ } @{ $archive->files }; + die "No JARS found in: $source\n" + unless @jars; return calculate_shas(@jars); } #=================================== -sub extract_zip { +sub jars_from_dir { #=================================== - my ( $package, $dir ) = @_; - my $archive = Archive::Extract->new( archive => $package, type => 'zip' ); - $archive->extract( to => $dir ) || die $archive->error; - return $archive->files; -} - -#=================================== -sub extract_tar_gz { -#=================================== - my ( $package, $dir ) = @_; - my $archive = Archive::Extract->new( archive => $package, type => 'tgz' ); - $archive->extract( to => $dir ) || die $archive->error; - return $archive->files; -} - -#=================================== -sub extract_rpm { -#=================================== - my ( $package, $dir ) = @_; - my $cwd = Cwd::cwd(); - my @files; - eval { - chdir $dir; - say "Trying with rpm2cpio"; - my $out = eval {`rpm2cpio '$package' | cpio -idmv --quiet`}; - unless ($out) { - say "Trying with rpm2cpio.pl"; - $out = eval {`rpm2cpio.pl '$package' | cpio -idmv --quiet`}; - } - @files = split "\n", $out if $out; - }; - chdir $cwd; - die $@ if $@; - die "Couldn't extract $package\n" unless @files; - return \@files; -} - -#=================================== -sub extract_deb { -#=================================== - my ( $package, $dir ) = @_; - my $archive = 
Archive::Ar->new; - $archive->read($package) || die $archive->error; - my $cwd = Cwd::cwd(); - eval { - chdir $dir; - $archive->extract('data.tar.gz') || die $archive->error; - }; - chdir $cwd; - die $@ if $@; - $archive = Archive::Extract->new( - archive => $dir . '/data.tar.gz', - type => 'tgz' + my $source = shift; + my @jars; + File::Find::find( + { wanted => sub { + push @jars, File::Spec->rel2abs( $_, $source ) + if /\.jar$/ && !/elasticsearch[^\/]*$/; + }, + no_chdir => 1 + }, + $source ); - $archive->extract( to => $dir ) || die $archive->error; - return $archive->files; + die "No JARS found in: $source\n" + unless @jars; + return calculate_shas(@jars); } #=================================== @@ -291,11 +244,13 @@ sub usage { USAGE: - # check the sha1 and LICENSE files for each jar in the zip|gz|deb|rpm + # check the sha1 and LICENSE files for each jar in the zip or directory $0 --check path/to/licenses/ path/to/package.zip + $0 --check path/to/licenses/ path/to/dir/ - # updates the sha1s for each jar in the zip|gz|deb|rpm + # updates the sha1s for each jar in the zip or directory $0 --update path/to/licenses/ path/to/package.zip + $0 --update path/to/licenses/ path/to/dir/ USAGE diff --git a/dev-tools/src/main/resources/license-check/lib/Archive/Ar.pm b/dev-tools/src/main/resources/license-check/lib/Archive/Ar.pm deleted file mode 100644 index 6d6439b37b4..00000000000 --- a/dev-tools/src/main/resources/license-check/lib/Archive/Ar.pm +++ /dev/null @@ -1,806 +0,0 @@ -########################################################### -# Archive::Ar - Pure perl module to handle ar achives -# -# Copyright 2003 - Jay Bonci <jaybonci@cpan.org> -# Copyright 2014 - John Bazik <jbazik@cpan.org> -# Licensed under the same terms as perl itself -# -########################################################### -package Archive::Ar; - -use base qw(Exporter); -our @EXPORT_OK = qw(COMMON BSD GNU); - -use strict; -use File::Spec; -use Time::Local; -use Carp qw(carp longmess); - -use vars qw($VERSION); -$VERSION = '2.02'; - -use constant CAN_CHOWN => ($> == 0 and $^O ne 'MacOS' and $^O ne 'MSWin32'); - -use constant ARMAG => "!<arch>\n"; -use constant SARMAG => length(ARMAG); -use constant ARFMAG => "`\n"; -use constant AR_EFMT1 => "#1/"; - -use constant COMMON => 1; -use constant BSD => 2; -use constant GNU => 3; - -my $has_io_string; -BEGIN { - $has_io_string = eval { - require IO::String; - IO::String->import(); - 1; - } || 0; -} - -sub new { - my $class = shift; - my $file = shift; - my $opts = shift || 0; - my $self = bless {}, $class; - my $defopts = { - chmod => 1, - chown => 1, - same_perms => ($> == 0) ?
1:0, - symbols => undef, - }; - $opts = {warn => $opts} unless ref $opts; - - $self->clear(); - $self->{opts} = {(%$defopts, %{$opts})}; - if ($file) { - return unless $self->read($file); - } - return $self; -} - -sub set_opt { - my $self = shift; - my $name = shift; - my $val = shift; - - $self->{opts}->{$name} = $val; -} - -sub get_opt { - my $self = shift; - my $name = shift; - - return $self->{opts}->{$name}; -} - -sub type { - return shift->{type}; -} - -sub clear { - my $self = shift; - - $self->{names} = []; - $self->{files} = {}; - $self->{type} = undef; -} - -sub read { - my $self = shift; - my $file = shift; - - my $fh = $self->_get_handle($file); - local $/ = undef; - my $data = <$fh>; - close $fh; - - return $self->read_memory($data); -} - -sub read_memory { - my $self = shift; - my $data = shift; - - $self->clear(); - return unless $self->_parse($data); - return length($data); -} - -sub contains_file { - my $self = shift; - my $filename = shift; - - return unless defined $filename; - return exists $self->{files}->{$filename}; -} - -sub extract { - my $self = shift; - - for my $filename (@_ ? @_ : @{$self->{names}}) { - $self->extract_file($filename) or return; - } - return 1; -} - -sub extract_file { - my $self = shift; - my $filename = shift; - my $target = shift || $filename; - - my $meta = $self->{files}->{$filename}; - return $self->_error("$filename: not in archive") unless $meta; - open my $fh, '>', $target or return $self->_error("$target: $!"); - binmode $fh; - syswrite $fh, $meta->{data} or return $self->_error("$filename: $!"); - close $fh or return $self->_error("$filename: $!"); - if (CAN_CHOWN && $self->{opts}->{chown}) { - chown $meta->{uid}, $meta->{gid}, $filename or - return $self->_error("$filename: $!"); - } - if ($self->{opts}->{chmod}) { - my $mode = $meta->{mode}; - unless ($self->{opts}->{same_perms}) { - $mode &= ~(oct(7000) | (umask | 0)); - } - chmod $mode, $filename or return $self->_error("$filename: $!"); - } - utime $meta->{date}, $meta->{date}, $filename or - return $self->_error("$filename: $!"); - return 1; -} - -sub rename { - my $self = shift; - my $filename = shift; - my $target = shift; - - if ($self->{files}->{$filename}) { - $self->{files}->{$target} = $self->{files}->{$filename}; - delete $self->{files}->{$filename}; - for (@{$self->{names}}) { - if ($_ eq $filename) { - $_ = $target; - last; - } - } - } -} - -sub chmod { - my $self = shift; - my $filename = shift; - my $mode = shift; # octal string or numeric - - return unless $self->{files}->{$filename}; - $self->{files}->{$filename}->{mode} = - $mode + 0 eq $mode ? $mode : oct($mode); - return 1; -} - -sub chown { - my $self = shift; - my $filename = shift; - my $uid = shift; - my $gid = shift; - - return unless $self->{files}->{$filename}; - $self->{files}->{$filename}->{uid} = $uid if $uid >= 0; - $self->{files}->{$filename}->{gid} = $gid if defined $gid && $gid >= 0; - return 1; -} - -sub remove { - my $self = shift; - my $files = ref $_[0] ? shift : \@_; - - my $nfiles_orig = scalar @{$self->{names}}; - - for my $file (@$files) { - next unless $file; - if (exists($self->{files}->{$file})) { - delete $self->{files}->{$file}; - } - else { - $self->_error("$file: no such member") - } - } - @{$self->{names}} = grep($self->{files}->{$_}, @{$self->{names}}); - - return $nfiles_orig - scalar @{$self->{names}}; -} - -sub list_files { - my $self = shift; - - return wantarray ? @{$self->{names}} : $self->{names}; -} - -sub add_files { - my $self = shift; - my $files = ref $_[0] ? 
shift : \@_; - - for my $path (@$files) { - if (open my $fd, $path) { - my @st = stat $fd or return $self->_error("$path: $!"); - local $/ = undef; - binmode $fd; - my $content = <$fd>; - close $fd; - - my $filename = (File::Spec->splitpath($path))[2]; - - $self->_add_data($filename, $content, @st[9,4,5,2,7]); - } - else { - $self->_error("$path: $!"); - } - } - return scalar @{$self->{names}}; -} - -sub add_data { - my $self = shift; - my $path = shift; - my $content = shift; - my $params = shift || {}; - - return $self->_error("No filename given") unless $path; - - my $filename = (File::Spec->splitpath($path))[2]; - - $self->_add_data($filename, $content, - $params->{date} || timelocal(localtime()), - $params->{uid} || 0, - $params->{gid} || 0, - $params->{mode} || 0100644) or return; - - return $self->{files}->{$filename}->{size}; -} - -sub write { - my $self = shift; - my $filename = shift; - my $opts = {(%{$self->{opts}}, %{shift || {}})}; - my $type = $opts->{type} || $self->{type} || COMMON; - - my @body = ( ARMAG ); - - my %gnuindex; - my @filenames = @{$self->{names}}; - if ($type eq GNU) { - # - # construct extended filename index, if needed - # - if (my @longs = grep(length($_) > 15, @filenames)) { - my $ptr = 0; - for my $long (@longs) { - $gnuindex{$long} = $ptr; - $ptr += length($long) + 2; - } - push @body, pack('A16A32A10A2', '//', '', $ptr, ARFMAG), - join("/\n", @longs, ''); - push @body, "\n" if $ptr % 2; # padding - } - } - for my $fn (@filenames) { - my $meta = $self->{files}->{$fn}; - my $mode = sprintf('%o', $meta->{mode}); - my $size = $meta->{size}; - my $name; - - if ($type eq GNU) { - $fn = '' if defined $opts->{symbols} && $fn eq $opts->{symbols}; - $name = $fn . '/'; - } - else { - $name = $fn; - } - if (length($name) <= 16 || $type eq COMMON) { - push @body, pack('A16A12A6A6A8A10A2', $name, - @$meta{qw/date uid gid/}, $mode, $size, ARFMAG); - } - elsif ($type eq GNU) { - push @body, pack('A1A15A12A6A6A8A10A2', '/', $gnuindex{$fn}, - @$meta{qw/date uid gid/}, $mode, $size, ARFMAG); - } - elsif ($type eq BSD) { - $size += length($name); - push @body, pack('A3A13A12A6A6A8A10A2', AR_EFMT1, length($name), - @$meta{qw/date uid gid/}, $mode, $size, ARFMAG), - $name; - } - else { - return $self->_error("$type: unexpected ar type"); - } - push @body, $meta->{data}; - push @body, "\n" if $size % 2; # padding - } - if ($filename) { - my $fh = $self->_get_handle($filename, '>'); - print $fh @body; - close $fh; - my $len = 0; - $len += length($_) for @body; - return $len; - } - else { - return join '', @body; - } -} - -sub get_content { - my $self = shift; - my ($filename) = @_; - - unless ($filename) { - $self->_error("get_content can't continue without a filename"); - return; - } - - unless (exists($self->{files}->{$filename})) { - $self->_error( - "get_content failed because there is not a file named $filename"); - return; - } - - return $self->{files}->{$filename}; -} - -sub get_data { - my $self = shift; - my $filename = shift; - - return $self->_error("$filename: no such member") - unless exists $self->{files}->{$filename}; - return $self->{files}->{$filename}->{data}; -} - -sub get_handle { - my $self = shift; - my $filename = shift; - my $fh; - - return $self->_error("$filename: no such member") - unless exists $self->{files}->{$filename}; - if ($has_io_string) { - $fh = IO::String->new($self->{files}->{$filename}->{data}); - } - else { - my $data = $self->{files}->{$filename}->{data}; - open $fh, '<', \$data or return $self->_error("in-memory file: $!"); - } - 
return $fh; -} - -sub error { - my $self = shift; - - return shift() ? $self->{longmess} : $self->{error}; -} - -# -# deprecated -# -sub DEBUG { - my $self = shift; - my $debug = shift; - - $self->{opts}->{warn} = 1 unless (defined($debug) and int($debug) == 0); -} - -sub _parse { - my $self = shift; - my $data = shift; - - unless (substr($data, 0, SARMAG, '') eq ARMAG) { - return $self->_error("Bad magic number - not an ar archive"); - } - my $type; - my $names; - while ($data =~ /\S/) { - my ($name, $date, $uid, $gid, $mode, $size, $magic) = - unpack('A16A12A6A6A8A10a2', substr($data, 0, 60, '')); - unless ($magic eq "`\n") { - return $self->_error("Bad file header"); - } - if ($name =~ m|^/|) { - $type = GNU; - if ($name eq '//') { - $names = substr($data, 0, $size, ''); - substr($data, 0, $size % 2, ''); - next; - } - elsif ($name eq '/') { - $name = $self->{opts}->{symbols}; - unless (defined $name && $name) { - substr($data, 0, $size + $size % 2, ''); - next; - } - } - else { - $name = substr($names, int(substr($name, 1))); - $name =~ s/\n.*//; - chop $name; - } - } - elsif ($name =~ m|^#1/|) { - $type = BSD; - $name = substr($data, 0, int(substr($name, 3)), ''); - $size -= length($name); - } - else { - if ($name =~ m|/$|) { - $type ||= GNU; # only gnu has trailing slashes - chop $name; - } - } - $uid = int($uid); - $gid = int($gid); - $mode = oct($mode); - my $content = substr($data, 0, $size, ''); - substr($data, 0, $size % 2, ''); - - $self->_add_data($name, $content, $date, $uid, $gid, $mode, $size); - } - $self->{type} = $type || COMMON; - return scalar @{$self->{names}}; -} - -sub _add_data { - my $self = shift; - my $filename = shift; - my $content = shift || ''; - my $date = shift; - my $uid = shift; - my $gid = shift; - my $mode = shift; - my $size = shift; - - if (exists($self->{files}->{$filename})) { - return $self->_error("$filename: entry already exists"); - } - $self->{files}->{$filename} = { - name => $filename, - date => defined $date ? $date : timelocal(localtime()), - uid => defined $uid ? $uid : 0, - gid => defined $gid ? $gid : 0, - mode => defined $mode ? $mode : 0100644, - size => defined $size ? 
$size : length($content), - data => $content, - }; - push @{$self->{names}}, $filename; - return 1; -} - -sub _get_handle { - my $self = shift; - my $file = shift; - my $mode = shift || '<'; - - if (ref $file) { - return $file if eval{*$file{IO}} or $file->isa('IO::Handle'); - return $self->_error("Not a filehandle"); - } - else { - open my $fh, $mode, $file or return $self->_error("$file: $!"); - binmode $fh; - return $fh; - } -} - -sub _error { - my $self = shift; - my $msg = shift; - - $self->{error} = $msg; - $self->{longerror} = longmess($msg); - if ($self->{opts}->{warn} > 1) { - carp $self->{longerror}; - } - elsif ($self->{opts}->{warn}) { - carp $self->{error}; - } - return; -} - -1; - -__END__ - -=head1 NAME - -Archive::Ar - Interface for manipulating ar archives - -=head1 SYNOPSIS - - use Archive::Ar; - - my $ar = Archive::Ar->new; - - $ar->read('./foo.ar'); - $ar->extract; - - $ar->add_files('./bar.tar.gz', 'bat.pl') - $ar->add_data('newfile.txt','Some contents'); - - $ar->chmod('file1', 0644); - $ar->chown('file1', $uid, $gid); - - $ar->remove('file1', 'file2'); - - my $filehash = $ar->get_content('bar.tar.gz'); - my $data = $ar->get_data('bar.tar.gz'); - my $handle = $ar->get_handle('bar.tar.gz'); - - my @files = $ar->list_files(); - - my $archive = $ar->write; - my $size = $ar->write('outbound.ar'); - - $ar->error(); - - -=head1 DESCRIPTION - -Archive::Ar is a pure-perl way to handle standard ar archives. - -This is useful if you have those types of archives on the system, but it -is also useful because .deb packages for the Debian GNU/Linux distribution are -ar archives. This is one building block in a future chain of modules to build, -manipulate, extract, and test debian modules with no platform or architecture -dependence. - -You may notice that the API to Archive::Ar is similar to Archive::Tar, and -this was done intentionally to keep similarity between the Archive::* -modules. - -=head1 METHODS - -=head2 new - - $ar = Archive::Ar->new() - $ar = Archive::Ar->new($filename) - $ar = Archive::Ar->new($filehandle) - -Returns a new Archive::Ar object. Without an argument, it returns -an empty object. If passed a filename or an open filehandle, it will -read the referenced archive into memory. If the read fails for any -reason, returns undef. - -=head2 set_opt - - $ar->set_opt($name, $val) - -Assign option $name value $val. Possible options are: - -=over 4 - -=item * warn - -Warning level. Levels are zero for no warnings, 1 for brief warnings, -and 2 for warnings with a stack trace. Default is zero. - -=item * chmod - -Change the file permissions of files created when extracting. Default -is true (non-zero). - -=item * same_perms - -When setting file permissions, use the values in the archive unchanged. -If false, removes setuid bits and applies the user's umask. Default is -true for the root user, false otherwise. - -=item * chown - -Change the owners of extracted files, if possible. Default is true. - -=item * type - -Archive type. May be GNU, BSD or COMMON, or undef if no archive has -been read. Defaults to the type of the archive read, or undef. - -=item * symbols - -Provide a filename for the symbol table, if present. If set, the symbol -table is treated as a file that can be read from or written to an archive. -It is an error if the filename provided matches the name of a file in the -archive. If undefined, the symbol table is ignored. Defaults to undef. - -=back - -=head2 get_opt - - $val = $ar->get_opt($name) - -Returns the value of option $name. 
- -=head2 type - - $type = $ar->type() - -Returns the type of the ar archive. The type is undefined until an -archive is loaded. If the archive displays characteristics of a gnu-style -archive, GNU is returned. If it looks like a bsd-style archive, BSD -is returned. Otherwise, COMMON is returned. Note that unless filenames -exceed 16 characters in length, bsd archives look like the common format. - -=head2 clear - - $ar->clear() - -Clears the current in-memory archive. - -=head2 read - - $len = $ar->read($filename) - $len = $ar->read($filehandle) - -This reads a new file into the object, removing any ar archive already -represented in the object. The argument may be a filename, filehandle -or IO::Handle object. Returns the size of the file contents or undef -if it fails. - -=head2 read_memory - - $len = $ar->read_memory($data) - -Parses the string argument as an archive, reading it into memory. Replaces -any previously loaded archive. Returns the number of bytes read, or undef -if it fails. - -=head2 contains_file - - $bool = $ar->contains_file($filename) - -Returns true if the archive contains a file with $filename. Returns -undef otherwise. - -=head2 extract - - $ar->extract() - $ar->extract_file($filename) - -Extracts files from the archive. The first form extracts all files, the -latter extracts just the named file. Extracted files are assigned the -permissions and modification time stored in the archive, and, if possible, -the user and group ownership. Returns non-zero upon success, or undef if -failure. - -=head2 rename - - $ar->rename($filename, $newname) - -Changes the name of a file in the in-memory archive. - -=head2 chmod - - $ar->chmod($filename, $mode); - -Change the mode of the member to C<$mode>. - -=head2 chown - - $ar->chown($filename, $uid, $gid); - $ar->chown($filename, $uid); - -Change the ownership of the member to user id C<$uid> and (optionally) -group id C<$gid>. Negative id values are ignored. - -=head2 remove - - $ar->remove(@filenames) - $ar->remove($arrayref) - -Removes files from the in-memory archive. Returns the number of files -removed. - -=head2 list_files - - @filenames = $ar->list_files() - -Returns a list of the names of all the files in the archive. -If called in a scalar context, returns a reference to an array. - -=head2 add_files - - $ar->add_files(@filenames) - $ar->add_files($arrayref) - -Adds files to the archive. The arguments can be paths, but only the -filenames are stored in the archive. Stores the uid, gid, mode, size, -and modification timestamp of the file as returned by C<stat>. - -Returns the number of files successfully added, or undef if failure. - -=head2 add_data - - $ar->add_data("filename", $data) - $ar->add_data("filename", $data, $options) - -Adds a file to the in-memory archive with name $filename and content -$data. File properties can be set with $optional_hashref: - - $options = { - 'data' => $data, - 'uid' => $uid, #defaults to zero - 'gid' => $gid, #defaults to zero - 'date' => $date, #date in epoch seconds. Defaults to now. - 'mode' => $mode, #defaults to 0100644; - } - -You cannot add_data over another file however. This returns the file length in -bytes if it is successful, undef otherwise. - -=head2 write - - $data = $ar->write() - $len = $ar->write($filename) - -Returns the archive as a string, or writes it to disk as $filename. -Returns the archive size upon success when writing to disk. Returns -undef if failure.
- -=head2 get_content - - $content = $ar->get_content($filename) - -This returns a hash with the file content in it, including the data -that the file would contain. If the file does not exist or no filename -is given, this returns undef. On success, a hash is returned: - - $content = { - 'name' => $filename, - 'date' => $mtime, - 'uid' => $uid, - 'gid' => $gid, - 'mode' => $mode, - 'size' => $size, - 'data' => $file_contents, - } - -=head2 get_data - - $data = $ar->get_data("filename") - -Returns a scalar containing the file data of the given archive -member. Upon error, returns undef. - -=head2 get_handle - - $handle = $ar->get_handle("filename")> - -Returns a file handle to the in-memory file data of the given archive member. -Upon error, returns undef. This can be useful for unpacking nested archives. -Uses IO::String if it's loaded. - -=head2 error - - $errstr = $ar->error($trace) - -Returns the current error string, which is usually the last error reported. -If a true value is provided, returns the error message and stack trace. - -=head1 BUGS - -See https://github.com/jbazik/Archive-Ar/issues/ to report and view bugs. - -=head1 SOURCE - -The source code repository for Archive::Ar can be found at http://github.com/jbazik/Archive-Ar/. - -=head1 COPYRIGHT - -Copyright 2009-2014 John Bazik E<lt>jbazik@cpan.orgE<gt>. - -Copyright 2003 Jay Bonci E<lt>jaybonci@cpan.orgE<gt>. - -This program is free software; you can redistribute it and/or modify it under -the same terms as Perl itself. - -See http://www.perl.com/perl/misc/Artistic.html - -=cut diff --git a/distribution/deb/pom.xml b/distribution/deb/pom.xml index 6ccb9045d72..c944dcc7c04 100644 --- a/distribution/deb/pom.xml +++ b/distribution/deb/pom.xml @@ -9,8 +9,14 @@ 2.0.0-beta1-SNAPSHOT - elasticsearch-deb + org.elasticsearch.distribution.deb + elasticsearch Elasticsearch DEB Distribution + + false @@ -19,8 +25,8 @@ - org.elasticsearch.distribution - elasticsearch-fully-loaded + org.elasticsearch.distribution.fully-loaded + elasticsearch ${elasticsearch.version} pom @@ -90,7 +96,6 @@ jdeb org.vafer - 1.4 ${project.build.directory}/releases/elasticsearch-${project.version}.deb @@ -253,33 +258,11 @@ + org.apache.maven.plugins maven-antrun-plugin - 1.8 - - check-license - verify - - run - - - - - - - Running license check - - - - - - - - - - integ-setup diff --git a/distribution/fully-loaded/pom.xml b/distribution/fully-loaded/pom.xml index babca43359e..9fef31640d9 100644 --- a/distribution/fully-loaded/pom.xml +++ b/distribution/fully-loaded/pom.xml @@ -9,7 +9,8 @@ 2.0.0-beta1-SNAPSHOT - elasticsearch-fully-loaded + org.elasticsearch.distribution.fully-loaded + elasticsearch Elasticsearch with all optional dependencies pom diff --git a/distribution/pom.xml b/distribution/pom.xml index 6e3ae4e78c7..9a01416ba30 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -71,7 +71,6 @@ - elasticsearch-${project.version} @@ -97,8 +96,32 @@ org.apache.maven.plugins maven-antrun-plugin - - + + + + check-license + verify + + run + + + ${skip.integ.tests} + + + + + Running license check + + + + + + + + + + org.apache.maven.plugins @@ -132,6 +155,14 @@ + + org.apache.maven.plugins + maven-jar-plugin + + + true + + com.carrotsearch.randomizedtesting junit4-maven-plugin @@ -143,7 +174,7 @@ 1 - 127.0.0.1:9300 + 127.0.0.1:${integ.transport.port} diff --git a/distribution/rpm/pom.xml b/distribution/rpm/pom.xml index d596dbfcab8..86da5bba0f2 100644 --- a/distribution/rpm/pom.xml +++ b/distribution/rpm/pom.xml @@ -9,19 +9,25 @@ 2.0.0-beta1-SNAPSHOT -
elasticsearch-rpm + org.elasticsearch.distribution.rpm + elasticsearch Elasticsearch RPM Distribution rpm - org.elasticsearch.distribution - elasticsearch-fully-loaded + org.elasticsearch.distribution.fully-loaded + elasticsearch ${elasticsearch.version} pom + + true + ${project.build.directory}/releases/ + + @@ -300,8 +306,7 @@ ${project.version} ${project.packaging} true - ${project.build.directory}/releases/ - elasticsearch-${project.version}.rpm + ${rpm.outputDirectory} @@ -311,32 +316,7 @@ org.apache.maven.plugins maven-antrun-plugin - 1.8 - - check-license - verify - - run - - - - true - - - - - Running license check - - - - - - - - - - integ-setup diff --git a/distribution/shaded/pom.xml b/distribution/shaded/pom.xml index f7704f94e9a..3163a8053b3 100644 --- a/distribution/shaded/pom.xml +++ b/distribution/shaded/pom.xml @@ -9,7 +9,8 @@ 2.0.0-beta1-SNAPSHOT - elasticsearch-shaded + org.elasticsearch.distribution.shaded + elasticsearch Elasticsearch Shaded Distribution @@ -21,13 +22,13 @@ - ${project.artifactId}-${project.version} org.apache.maven.plugins maven-jar-plugin + false true diff --git a/distribution/src/main/assemblies/common-bin.xml b/distribution/src/main/assemblies/common-bin.xml index 868521b28eb..f95368d3572 100644 --- a/distribution/src/main/assemblies/common-bin.xml +++ b/distribution/src/main/assemblies/common-bin.xml @@ -4,10 +4,11 @@ /lib true false + false - org.elasticsearch.distribution:* + *:pom diff --git a/distribution/src/main/resources/bin/plugin b/distribution/src/main/resources/bin/plugin index 15bf59ca7bb..797d8e3a8ff 100755 --- a/distribution/src/main/resources/bin/plugin +++ b/distribution/src/main/resources/bin/plugin @@ -69,15 +69,15 @@ fi while [ $# -gt 0 ]; do case $1 in -D*=*) - properties="$properties $1" + properties="$properties \"$1\"" ;; -D*) var=$1 shift - properties="$properties $var=$1" + properties="$properties \"$var\"=\"$1\"" ;; *) - args="$args $1" + args="$args \"$1\"" esac shift done @@ -88,7 +88,7 @@ if [ -e "$CONF_DIR" ]; then *-Des.default.path.conf=*|*-Des.path.conf=*) ;; *) - properties="$properties -Des.default.path.conf=$CONF_DIR" + properties="$properties -Des.default.path.conf=\"$CONF_DIR\"" ;; esac fi @@ -98,11 +98,11 @@ if [ -e "$CONF_FILE" ]; then *-Des.default.config=*|*-Des.config=*) ;; *) - properties="$properties -Des.default.config=$CONF_FILE" + properties="$properties -Des.default.config=\"$CONF_FILE\"" ;; esac fi export HOSTNAME=`hostname -s` -exec "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Xmx64m -Xms16m -Delasticsearch -Des.path.home="$ES_HOME" $properties -cp "$ES_HOME/lib/*" org.elasticsearch.plugins.PluginManagerCliParser $args +eval "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Xmx64m -Xms16m -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginManagerCliParser $args diff --git a/distribution/src/test/resources/packaging/scripts/25_tar_plugins.bats b/distribution/src/test/resources/packaging/scripts/25_tar_plugins.bats index f8f1481699c..e4eccfaf1ec 100644 --- a/distribution/src/test/resources/packaging/scripts/25_tar_plugins.bats +++ b/distribution/src/test/resources/packaging/scripts/25_tar_plugins.bats @@ -257,3 +257,101 @@ setup() { run rm -rf "$TEMP_CONFIG_DIR" [ "$status" -eq 0 ] } + +@test "[TAR] install shield plugin to elasticsearch directory with a space" { + export ES_DIR="/tmp/elastic search" + + # Install the archive + install_archive + + # Checks that the archive is correctly installed + verify_archive_installation + + # Move the Elasticsearch 
+    rm -rf "$ES_DIR"
+    mv /tmp/elasticsearch "$ES_DIR"
+
+    # Checks that plugin archive is available
+    [ -e "$SHIELD_ZIP" ]
+
+    # Install Shield
+    run "$ES_DIR/bin/plugin" install elasticsearch/shield/latest -u "file://$SHIELD_ZIP"
+    [ "$status" -eq 0 ]
+
+    # Checks that Shield is correctly installed
+    assert_file_exist "$ES_DIR/bin/shield"
+    assert_file_exist "$ES_DIR/bin/shield/esusers"
+    assert_file_exist "$ES_DIR/bin/shield/syskeygen"
+    assert_file_exist "$ES_DIR/config/shield"
+    assert_file_exist "$ES_DIR/config/shield/role_mapping.yml"
+    assert_file_exist "$ES_DIR/config/shield/roles.yml"
+    assert_file_exist "$ES_DIR/config/shield/users"
+    assert_file_exist "$ES_DIR/config/shield/users_roles"
+    assert_file_exist "$ES_DIR/plugins/shield"
+
+    # Remove the plugin
+    run "$ES_DIR/bin/plugin" remove elasticsearch/shield/latest
+    [ "$status" -eq 0 ]
+
+    # Checks that the plugin is correctly removed
+    assert_file_not_exist "$ES_DIR/bin/shield"
+    assert_file_exist "$ES_DIR/config/shield"
+    assert_file_exist "$ES_DIR/config/shield/role_mapping.yml"
+    assert_file_exist "$ES_DIR/config/shield/roles.yml"
+    assert_file_exist "$ES_DIR/config/shield/users"
+    assert_file_exist "$ES_DIR/config/shield/users_roles"
+    assert_file_not_exist "$ES_DIR/plugins/shield"
+
+    # Cleanup our temporary Elasticsearch installation
+    rm -rf "$ES_DIR"
+}
+
+@test "[TAR] install shield plugin from a directory with a space" {
+
+    export SHIELD_ZIP_WITH_SPACE="/tmp/plugins with space/shield.zip"
+
+    # Install the archive
+    install_archive
+
+    # Checks that the archive is correctly installed
+    verify_archive_installation
+
+    # Checks that plugin archive is available
+    [ -e "$SHIELD_ZIP" ]
+
+    # Copy the shield plugin to a directory with a space in it
+    rm -f "$SHIELD_ZIP_WITH_SPACE"
+    mkdir -p "$(dirname "$SHIELD_ZIP_WITH_SPACE")"
+    cp $SHIELD_ZIP "$SHIELD_ZIP_WITH_SPACE"
+
+    # Install Shield
+    run /tmp/elasticsearch/bin/plugin install elasticsearch/shield/latest -u "file://$SHIELD_ZIP_WITH_SPACE"
+    [ "$status" -eq 0 ]
+
+    # Checks that Shield is correctly installed
+    assert_file_exist "/tmp/elasticsearch/bin/shield"
+    assert_file_exist "/tmp/elasticsearch/bin/shield/esusers"
+    assert_file_exist "/tmp/elasticsearch/bin/shield/syskeygen"
+    assert_file_exist "/tmp/elasticsearch/config/shield"
+    assert_file_exist "/tmp/elasticsearch/config/shield/role_mapping.yml"
+    assert_file_exist "/tmp/elasticsearch/config/shield/roles.yml"
+    assert_file_exist "/tmp/elasticsearch/config/shield/users"
+    assert_file_exist "/tmp/elasticsearch/config/shield/users_roles"
+    assert_file_exist "/tmp/elasticsearch/plugins/shield"
+
+    # Remove the plugin
+    run /tmp/elasticsearch/bin/plugin remove elasticsearch/shield/latest
+    [ "$status" -eq 0 ]
+
+    # Checks that the plugin is correctly removed
+    assert_file_not_exist "/tmp/elasticsearch/bin/shield"
+    assert_file_exist "/tmp/elasticsearch/config/shield"
+    assert_file_exist "/tmp/elasticsearch/config/shield/role_mapping.yml"
+    assert_file_exist "/tmp/elasticsearch/config/shield/roles.yml"
+    assert_file_exist "/tmp/elasticsearch/config/shield/users"
+    assert_file_exist "/tmp/elasticsearch/config/shield/users_roles"
+    assert_file_not_exist "/tmp/elasticsearch/plugins/shield"
+
+    # Cleanup our plugin directory with a space
+    rm -rf "$SHIELD_ZIP_WITH_SPACE"
+}
diff --git a/distribution/tar/pom.xml b/distribution/tar/pom.xml
index f62803ce499..3c41c5d87e5 100644
--- a/distribution/tar/pom.xml
+++ b/distribution/tar/pom.xml
@@ -9,13 +9,19 @@
    2.0.0-beta1-SNAPSHOT

-    elasticsearch-tar
+    org.elasticsearch.distribution.tar
+    elasticsearch
     Elasticsearch TAR Distribution
+
+

-        org.elasticsearch.distribution
-        elasticsearch-fully-loaded
+        org.elasticsearch.distribution.fully-loaded
+        elasticsearch
         ${elasticsearch.version}
         pom
@@ -49,30 +55,7 @@
             org.apache.maven.plugins
             maven-antrun-plugin
-            1.8
-
-                    check-license
-                    verify
-
-                        run
-
-
-
-
-
-
-                    Running license check
-
-
-
-
-
-
-
-
-
diff --git a/distribution/zip/pom.xml b/distribution/zip/pom.xml
index 4d3f19d93e1..afa3dc286eb 100644
--- a/distribution/zip/pom.xml
+++ b/distribution/zip/pom.xml
@@ -9,13 +9,19 @@
     2.0.0-beta1-SNAPSHOT

-    elasticsearch-zip
+    org.elasticsearch.distribution.zip
+    elasticsearch
     Elasticsearch ZIP Distribution
+
+

-        org.elasticsearch.distribution
-        elasticsearch-fully-loaded
+        org.elasticsearch.distribution.fully-loaded
+        elasticsearch
         ${elasticsearch.version}
         pom
@@ -49,30 +55,7 @@
             org.apache.maven.plugins
             maven-antrun-plugin
-            1.8
-
-                    check-license
-                    verify
-
-                        run
-
-
-
-
-
-
-                    Running license check
-
-
-
-
-
-
-
-
-
@@ -83,7 +66,11 @@

-
+
+
+
+
+
diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc
index 512de738e1d..215e8449f37 100644
--- a/docs/reference/cluster/nodes-info.asciidoc
+++ b/docs/reference/cluster/nodes-info.asciidoc
@@ -117,19 +117,3 @@ The result will look similar to:
 }
 }
 --------------------------------------------------
-
-if your `plugin` data is subject to change use
-`plugins.info_refresh_interval` to change or disable the caching
-interval:
-
-[source,js]
---------------------------------------------------
-# Change cache to 20 seconds
-plugins.info_refresh_interval: 20s
-
-# Infinite cache
-plugins.info_refresh_interval: -1
-
-# Disable cache
-plugins.info_refresh_interval: 0
---------------------------------------------------
diff --git a/docs/reference/mapping/types/nested-type.asciidoc b/docs/reference/mapping/types/nested-type.asciidoc
index d507f535af8..1427c93b8f3 100644
--- a/docs/reference/mapping/types/nested-type.asciidoc
+++ b/docs/reference/mapping/types/nested-type.asciidoc
@@ -76,7 +76,7 @@ uses type `nested`:
 {
     "type1" : {
         "properties" : {
-            "users" : {
+            "user" : {
                 "type" : "nested",
                 "properties": {
                     "first" : {"type": "string" },
@@ -99,7 +99,7 @@ You may want to index inner objects both as `nested` fields *and* as flattened
 {
     "type1" : {
         "properties" : {
-            "users" : {
+            "user" : {
                 "type" : "nested",
                 "include_in_parent": true,
                 "properties": {
diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc
index 940fd05ab2e..ecfaad76d6d 100644
--- a/docs/reference/query-dsl/multi-match-query.asciidoc
+++ b/docs/reference/query-dsl/multi-match-query.asciidoc
@@ -304,7 +304,7 @@ That solves one of the two problems. The problem of differing term frequencies
 is solved by _blending_ the term frequencies for all fields in order to even
 out the differences. In other words, `first_name:smith` will be treated as
 though it has the same weight as `last_name:smith`. (Actually,
-`first_name:smith` is given a tiny advantage over `last_name:smith`, just to
+`last_name:smith` is given a tiny advantage over `first_name:smith`, just to
 make the order of results more stable.)

 If you run the above query through the <<search-validate,Validate API>>, it returns this
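The blending that the corrected paragraph above describes deserves a small illustration. This is only a rough sketch under stated assumptions, not the real implementation (which lives in Lucene's BlendedTermQuery); the helper and names below are hypothetical:

[source,java]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of term-frequency blending across fields.
public class BlendedDfSketch {

    // Use the highest document frequency seen in any field, so "smith" is not
    // scored as artificially rare just because first_name:smith is uncommon.
    static int blendedDocFreq(Map<String, Integer> dfPerField) {
        int max = 0;
        for (int df : dfPerField.values()) {
            max = Math.max(max, df);
        }
        return max;
    }

    public static void main(String[] args) {
        Map<String, Integer> df = new HashMap<>();
        df.put("first_name", 3);  // "smith" is rare as a first name
        df.put("last_name", 97);  // but common as a last name
        // Both first_name:smith and last_name:smith are scored as if df were 97.
        System.out.println(blendedDocFreq(df)); // prints 97
    }
}
--------------------------------------------------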
diff --git a/docs/reference/setup/as-a-service.asciidoc b/docs/reference/setup/as-a-service.asciidoc
index e325c1165f9..f603a13440b 100644
--- a/docs/reference/setup/as-a-service.asciidoc
+++ b/docs/reference/setup/as-a-service.asciidoc
@@ -1,5 +1,5 @@
 [[setup-service]]
-== Running As a Service on Linux
+== Running as a Service on Linux

 In order to run elasticsearch as a service on your operating system, the provided packages try to
 make it as easy as possible for you to start and stop elasticsearch during reboot and upgrades.
diff --git a/plugins/cloud-aws/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java b/plugins/cloud-aws/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java
index 1e891836a07..91f06b67c00 100644
--- a/plugins/cloud-aws/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java
+++ b/plugins/cloud-aws/src/main/java/org/elasticsearch/cloud/aws/blobstore/S3BlobStore.java
@@ -77,11 +77,24 @@ public class S3BlobStore extends AbstractComponent implements BlobStore {
         // Also, if invalid security credentials are used to execute this method, the
         // client is not able to distinguish between bucket permission errors and
         // invalid credential errors, and this method could return an incorrect result.
-        if (!client.doesBucketExist(bucket)) {
-            if (region != null) {
-                client.createBucket(bucket, region);
-            } else {
-                client.createBucket(bucket);
+        int retry = 0;
+        while (retry <= maxRetries) {
+            try {
+                if (!client.doesBucketExist(bucket)) {
+                    if (region != null) {
+                        client.createBucket(bucket, region);
+                    } else {
+                        client.createBucket(bucket);
+                    }
+                }
+                break;
+            } catch (AmazonClientException e) {
+                if (shouldRetry(e) && retry < maxRetries) {
+                    retry++;
+                } else {
+                    logger.debug("S3 client create bucket failed");
+                    throw e;
+                }
             }
         }
     }
diff --git a/plugins/pom.xml b/plugins/pom.xml
index b007dd7c72a..2d8e9387d9c 100644
--- a/plugins/pom.xml
+++ b/plugins/pom.xml
@@ -385,16 +385,14 @@
                 ${skip.integ.tests}

-                        org.elasticsearch.distribution
-                        elasticsearch-zip
+                        org.elasticsearch.distribution.zip
+                        elasticsearch
                         ${elasticsearch.version}
                         zip
                         true
-
-                        elasticsearch-${elasticsearch.version}.zip
+                        true
                         ${integ.deps}
@@ -411,7 +409,7 @@
                 1

-                    127.0.0.1:9300
+                    127.0.0.1:${integ.transport.port}
diff --git a/pom.xml b/pom.xml
index 501f5a457c0..188ed85015e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -109,6 +109,8 @@
     ${project.build.directory}/integ-tests
     ${project.build.directory}/integ-deps
     ${integ.scratch}/temp
+    9400
+    9500
     \bno(n|)commit\b
@@ -905,7 +907,7 @@
                     org.vafer
                     jdeb
-                    1.3
+                    1.4
                     true
diff --git a/rest-api-spec/pom.xml b/rest-api-spec/pom.xml
index 3b14e1722cd..178c55b8770 100644
--- a/rest-api-spec/pom.xml
+++ b/rest-api-spec/pom.xml
@@ -34,4 +34,4 @@

-
\ No newline at end of file
+
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
index 0d955852c07..d8beff155c7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml
@@ -46,8 +46,8 @@
             /^( index1 \s+
                 (\d+) \s+
-                (\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d.\d\d\d[+-]\d\d:\d\d) \s+
+                (\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d.\d\d\dZ) \s+
                 (\d+) \s+
-                (\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d.\d\d\d[+-]\d\d:\d\d) \s*
+                (\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d.\d\d\dZ) \s*
             ) $/
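The regex change just above swaps an explicit UTC offset (`[+-]\d\d:\d\d`) for a literal `Z`, i.e. the cat API is now expected to print creation dates in UTC. A quick sketch with Joda-Time (the date-time library Elasticsearch bundled at the time; the class name below is illustrative) shows the two shapes:

[source,java]
--------------------------------------------------
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class UtcTimestampSketch {
    public static void main(String[] args) {
        // Joda's default ISO-8601 printer emits a literal 'Z' for the UTC zone,
        // e.g. 2015-07-24T09:30:15.123Z, which matches the updated pattern
        // \d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d.\d\d\dZ
        System.out.println(new DateTime(DateTimeZone.UTC));

        // The same instant in a fixed-offset zone prints e.g.
        // 2015-07-24T11:30:15.123+02:00, the form the old pattern matched.
        System.out.println(new DateTime(DateTimeZone.forOffsetHours(2)));
    }
}
--------------------------------------------------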
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yaml
index d6a38b4c168..e696a5600bc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yaml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yaml
@@ -18,4 +18,5 @@
         indices.upgrade:
           index: test_index

-  - match: {upgraded_indices.test_index: '/(\d\.)+\d/'}
+  - match: {upgraded_indices.test_index.oldest_lucene_segment_version: '/(\d\.)+\d/'}
+  - is_true: upgraded_indices.test_index.upgrade_version
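For reference, the `/(\d\.)+\d/` pattern asserted above is just "digits separated by dots", the shape of a Lucene segment version. A minimal sketch in plain Java regex terms (class name illustrative; this says nothing about the REST test framework's own matching semantics):

[source,java]
--------------------------------------------------
import java.util.regex.Pattern;

public class VersionPatternSketch {
    public static void main(String[] args) {
        // One or more "digit + dot" groups followed by a final digit.
        Pattern p = Pattern.compile("(\\d\\.)+\\d");
        System.out.println(p.matcher("5.2.1").matches()); // true
        System.out.println(p.matcher("5").matches());     // false: needs at least one dot
        // Each group is a single digit plus a dot, so "4.10.4" does not match
        // as a whole string, though a substring search still finds "4.1".
        System.out.println(p.matcher("4.10.4").matches()); // false
        System.out.println(p.matcher("4.10.4").find());    // true
    }
}
--------------------------------------------------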