Add version for 5.0.0

This commit adds the version constant for 5.0.0.

Relates #21244
Jason Tedor 2016-11-01 13:51:53 -04:00
parent a612e5988e
commit 7751049c14
13 changed files with 94 additions and 26 deletions

@@ -89,6 +89,8 @@ public class Version {
public static final Version V_5_0_0_beta1 = new Version(V_5_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_5_0_0_rc1_ID = 5000051;
public static final Version V_5_0_0_rc1 = new Version(V_5_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_5_0_0_ID = 5000099;
public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final int V_6_0_0_alpha1_ID = 6000001;
public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_3_0);
public static final Version CURRENT = V_6_0_0_alpha1;
@@ -115,6 +117,8 @@ public class Version {
switch (id) {
case V_6_0_0_alpha1_ID:
return V_6_0_0_alpha1;
case V_5_0_0_ID:
return V_5_0_0;
case V_5_0_0_rc1_ID:
return V_5_0_0_rc1;
case V_5_0_0_beta1_ID:
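Aside, not part of the diff: the ids above are consistent with Elasticsearch encoding a version as a single integer, major * 1_000_000 + minor * 10_000 + revision * 100 + build, where build 99 marks the final release and smaller builds mark prereleases (5000051 above is 5.0.0-rc1). A minimal decoding sketch under that assumption:

int id = 5000099;                  // V_5_0_0_ID
int major = id / 1_000_000;        // 5
int minor = (id / 10_000) % 100;   // 0
int revision = (id / 100) % 100;   // 0
int build = id % 100;              // 99, i.e. the GA release

With the switch case added above, Version.fromId(5000099) now returns the V_5_0_0 constant rather than synthesizing a fresh Version instance, which is why the test-local 5.0.0 constants are removed further down in this commit.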

@@ -276,7 +276,6 @@ public class VersionTests extends ESTestCase {
assertUnknownVersion(OsStats.V_5_1_0); // once we released 5.1.0 and it's added to Version.java we need to remove this constant
assertUnknownVersion(SimpleQueryStringBuilder.V_5_1_0_UNRELEASED);
// once we released 5.0.0 and it's added to Version.java we need to remove this constant
assertUnknownVersion(ShardValidateQueryRequestTests.V_5_0_0);
}
public static void assertUnknownVersion(Version version) {
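The body of assertUnknownVersion is cut off by the hunk above. Purely as a plausible sketch (an assumption, not the file's actual implementation), such a check only needs to verify that the placeholder's id does not yet resolve to a constant declared in Version.java:

public static void assertUnknownVersion(Version version) {
    // if the version were declared, fromId would hand back that very constant
    assertNotSame("version " + version + " is declared in Version.java, remove the placeholder constant",
        version, Version.fromId(version.id));
}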

@@ -18,6 +18,7 @@
*/
package org.elasticsearch.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -92,7 +93,7 @@ public class ExplainRequestTests extends ESTestCase {
.decode("AAABBWluZGV4BHR5cGUCaWQBDHNvbWVfcm91dGluZwEOdGhlX3ByZWZlcmVuY2UEdGVybT" +
"+AAAAABWZpZWxkFQV2YWx1ZQIGYWxpYXMwBmFsaWFzMQECBmZpZWxkMQZmaWVsZDIBAQEIZmllbGQxLioBCGZpZWxkMi4qAA"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
in.setVersion(Version.V_5_0_0);
ExplainRequest readRequest = new ExplainRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
@@ -104,7 +105,7 @@ public class ExplainRequestTests extends ESTestCase {
assertEquals(request.routing(), readRequest.routing());
assertEquals(request.fetchSourceContext(), readRequest.fetchSourceContext());
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}
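Pinning the stream to Version.V_5_0_0 on both the read and the write side matters because Writeable implementations branch on the stream version, and it keeps the round-tripped bytes comparable to the stored base64 payload. A generic sketch of that version-gating pattern (the field names are hypothetical, not from this commit):

@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    out.writeString(existingField);
    // a field introduced after 5.0.0 must not be sent to (or expected from) a 5.0.0 stream
    if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) {
        out.writeOptionalString(fieldAddedLater);   // hypothetical field
    }
}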

@@ -42,7 +42,6 @@ import java.util.Collections;
import java.util.List;
public class ShardValidateQueryRequestTests extends ESTestCase {
public static final Version V_5_0_0 = Version.fromId(5000099);
protected NamedWriteableRegistry namedWriteableRegistry;
protected SearchRequestParsers searchRequestParsers;
@@ -94,7 +93,7 @@ public class ShardValidateQueryRequestTests extends ESTestCase {
// this is a base64 encoded request generated with the same input
.decode("AAVpbmRleAZmb29iYXIBAQdpbmRpY2VzBAR0ZXJtP4AAAAAFZmllbGQVBXZhbHVlAgV0eXBlMQV0eXBlMgIGYWxpYXMwBmFsaWFzMQABAA"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(V_5_0_0);
in.setVersion(Version.V_5_0_0);
ShardValidateQueryRequest readRequest = new ShardValidateQueryRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
@@ -106,7 +105,7 @@ public class ShardValidateQueryRequestTests extends ESTestCase {
assertEquals(request.rewrite(), readRequest.rewrite());
assertEquals(request.shardId(), readRequest.shardId());
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}

@@ -82,6 +82,7 @@ import java.util.SortedSet;
import java.util.TreeSet;
import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks;
import static org.elasticsearch.test.OldIndexUtils.getIndexDir;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -445,8 +446,15 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
throw new IllegalStateException("Backwards index must contain exactly one cluster");
}
// the bwc scripts packs the indices under this path
return list[0].resolve("nodes/0/");
int zipIndex = indexFile.indexOf(".zip");
final Version version = Version.fromString(indexFile.substring("index-".length(), zipIndex));
if (version.before(Version.V_5_0_0_alpha1)) {
// the bwc scripts packs the indices under this path
return list[0].resolve("nodes/0/");
} else {
// after 5.0.0, data folders do not include the cluster name
return list[0].resolve("0");
}
}
public void testOldClusterStates() throws Exception {
@@ -481,9 +489,19 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
String indexName = indexFile.replace(".zip", "").toLowerCase(Locale.ROOT).replace("unsupported-", "index-");
Path nodeDir = getNodeDir(indexFile);
logger.info("Parsing cluster state files from index [{}]", indexName);
assertNotNull(globalFormat.loadLatestState(logger, nodeDir)); // no exception
Path indexDir = nodeDir.resolve("indices").resolve(indexName);
assertNotNull(indexFormat.loadLatestState(logger, indexDir)); // no exception
final MetaData metaData = globalFormat.loadLatestState(logger, nodeDir);
assertNotNull(metaData);
final Version version = Version.fromString(indexName.substring("index-".length()));
final Path dataDir;
if (version.before(Version.V_5_0_0_alpha1)) {
dataDir = nodeDir.getParent().getParent();
} else {
dataDir = nodeDir.getParent();
}
final Path indexDir = getIndexDir(logger, indexName, indexFile, dataDir);
assertNotNull(indexFormat.loadLatestState(logger, indexDir));
}
}
}
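To make the branching above concrete, a small worked example reusing the names from getNodeDir (the archive name is illustrative):

// for a backwards-compatibility archive named "index-2.4.1.zip"
String indexFile = "index-2.4.1.zip";
int zipIndex = indexFile.indexOf(".zip");                                                // 11
Version version = Version.fromString(indexFile.substring("index-".length(), zipIndex));  // 2.4.1
// 2.4.1 is before 5.0.0-alpha1, so the old nodes/0 layout applies;
// an index-5.0.0.zip archive would resolve to the bare "0" directory instead
Path nodeDir = version.before(Version.V_5_0_0_alpha1)
    ? list[0].resolve("nodes/0/")
    : list[0].resolve("0");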

@@ -19,6 +19,7 @@
package org.elasticsearch.bwcompat;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.FileTestUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
@@ -70,7 +71,12 @@ public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase {
final Set<SnapshotInfo> snapshotInfos = Sets.newHashSet(getSnapshots(repoName));
assertThat(snapshotInfos.size(), equalTo(1));
SnapshotInfo originalSnapshot = snapshotInfos.iterator().next();
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1")));
if (Version.fromString(version).before(Version.V_5_0_0_alpha1)) {
assertThat(originalSnapshot.snapshotId(), equalTo(new SnapshotId("test_1", "test_1")));
} else {
assertThat(originalSnapshot.snapshotId().getName(), equalTo("test_1"));
assertNotNull(originalSnapshot.snapshotId().getUUID()); // it's a random UUID now
}
assertThat(Sets.newHashSet(originalSnapshot.indices()), equalTo(indices));
logger.info("--> restore the original snapshot");

@@ -211,11 +211,11 @@ public class IndexFolderUpgraderTests extends ESTestCase {
throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length);
}
// the bwc scripts packs the indices under this path
Path src = list[0].resolve("nodes/0/indices/" + indexName);
Path src = OldIndexUtils.getIndexDir(logger, indexName, path.getFileName().toString(), list[0]);
assertTrue("[" + path + "] missing index dir: " + src.toString(), Files.exists(src));
final Path indicesPath = randomFrom(nodeEnvironment.nodePaths()).indicesPath;
logger.info("--> injecting index [{}] into [{}]", indexName, indicesPath);
OldIndexUtils.copyIndex(logger, src, indexName, indicesPath);
OldIndexUtils.copyIndex(logger, src, src.getFileName().toString(), indicesPath);
IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment);
// ensure old index folder is deleted

@@ -1856,6 +1856,7 @@ public class InternalEngineTests extends ESTestCase {
return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/21147")
public void testUpgradeOldIndex() throws IOException {
List<Path> indexes = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) {

@@ -190,7 +190,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
"ZXJtP4AAAAANbUtDSnpHU3lidm5KUBUMaVpqeG9vcm5QSFlvAAEBLGdtcWxuRWpWTXdvTlhMSHh0RWlFdHBnbEF1cUNmVmhoUVlwRFZxVllnWWV1A2ZvbwEA" +
"AQhwYWlubGVzc/8AALk4AAAAAAABAAAAAAAAAwpKU09PU0ZmWnhFClVqTGxMa2p3V2gKdUJwZ3R3dXFER5Hg97uT7MOmPgEADw"));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
in.setVersion(Version.V_5_0_0);
ShardSearchTransportRequest readRequest = new ShardSearchTransportRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
@@ -214,7 +214,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
.should(QueryBuilders.termQuery("foo", "bar2"))
);
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(ShardValidateQueryRequestTests.V_5_0_0);
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}

@@ -265,12 +265,20 @@ def generate_index(client, version, index_name):
mappings['doc'] = {'properties' : {}}
supports_dots_in_field_names = parse_version(version) >= parse_version("2.4.0")
if supports_dots_in_field_names:
mappings["doc"]['properties'].update({
if parse_version(version) < parse_version("5.0.0-alpha1"):
mappings["doc"]['properties'].update({
'field.with.dots': {
'type': 'string',
'boost': 4
}
})
else:
mappings["doc"]['properties'].update({
'field.with.dots': {
'type': 'text'
}
})
if parse_version(version) < parse_version("5.0.0-alpha1"):
mappings['norms'] = {
@@ -339,7 +347,10 @@ def generate_index(client, version, index_name):
if warmers:
body['warmers'] = warmers
client.indices.create(index=index_name, body=body)
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
if parse_version(version) < parse_version("5.0.0-alpha1"):
health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0)
else:
health = client.cluster.health(wait_for_status='green', wait_for_no_relocating_shards=True)
assert health['timed_out'] == False, 'cluster health timed out %s' % health
num_docs = random.randint(2000, 3000)
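The health call change reflects the 5.0 rename of the wait_for_relocating_shards=0 parameter to wait_for_no_relocating_shards=true. For comparison, a hedged sketch of the same wait in the Java client of that era (the builder method names are recalled from the 5.x API, not shown in this diff):

ClusterHealthResponse health = client.admin().cluster().prepareHealth()
    .setWaitForGreenStatus()
    .setWaitForNoRelocatingShards(true)   // pre-5.0 clients used setWaitForRelocatingShards(0)
    .get();
assert health.isTimedOut() == false;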

@@ -30,6 +30,7 @@ import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.upgrade.get.IndexUpgradeStatus;
import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.Loggers;
@@ -56,7 +57,10 @@ import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
import static org.elasticsearch.test.ESTestCase.randomInt;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class OldIndexUtils {
@@ -103,10 +107,35 @@ public class OldIndexUtils {
throw new IllegalStateException("Backwards index must contain exactly one cluster");
}
// the bwc scripts packs the indices under this path
Path src = list[0].resolve("nodes/0/indices/" + indexName);
assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
copyIndex(logger, src, indexName, paths);
final Path src = getIndexDir(logger, indexName, indexFile, list[0]);
copyIndex(logger, src, src.getFileName().toString(), paths);
}
public static Path getIndexDir(
final Logger logger,
final String indexName,
final String indexFile,
final Path dataDir) throws IOException {
final Version version = Version.fromString(indexName.substring("index-".length()));
if (version.before(Version.V_5_0_0_alpha1)) {
// the bwc scripts packs the indices under this path
Path src = dataDir.resolve("nodes/0/indices/" + indexName);
assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
return src;
} else {
final List<Path> indexFolders = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dataDir.resolve("0/indices"))) {
for (final Path path : stream) {
indexFolders.add(path);
}
}
assertThat(indexFolders.size(), equalTo(1));
final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, indexFolders.get(0));
assertNotNull(indexMetaData);
assertThat(indexFolders.get(0).getFileName().toString(), equalTo(indexMetaData.getIndexUUID()));
assertThat(indexMetaData.getCreationVersion(), equalTo(version));
return indexFolders.get(0);
}
}
public static void assertNotUpgraded(Client client, String... index) throws Exception {
@@ -128,10 +157,10 @@
}
// randomly distribute the files from src over dests paths
public static void copyIndex(final Logger logger, final Path src, final String indexName, final Path... dests) throws IOException {
public static void copyIndex(final Logger logger, final Path src, final String folderName, final Path... dests) throws IOException {
Path destinationDataPath = dests[randomInt(dests.length - 1)];
for (Path dest : dests) {
Path indexDir = dest.resolve(indexName);
Path indexDir = dest.resolve(folderName);
assertFalse(Files.exists(indexDir));
Files.createDirectories(indexDir);
}
@@ -140,7 +169,7 @@
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
Path relativeDir = src.relativize(dir);
for (Path dest : dests) {
Path destDir = dest.resolve(indexName).resolve(relativeDir);
Path destDir = dest.resolve(folderName).resolve(relativeDir);
Files.createDirectories(destDir);
}
return FileVisitResult.CONTINUE;
@@ -155,7 +184,7 @@
}
Path relativeFile = src.relativize(file);
Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile);
Path destFile = destinationDataPath.resolve(folderName).resolve(relativeFile);
logger.trace("--> Moving {} to {}", relativeFile, destFile);
Files.move(file, destFile);
assertFalse(Files.exists(file));
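Putting the two helpers together, roughly as IndexFolderUpgraderTests does above (variable names are illustrative and the bwc archive is assumed to be unzipped already):

Path clusterDir = list[0];   // the single cluster directory inside the unzipped archive
Path src = OldIndexUtils.getIndexDir(logger, indexName, indexFile, clusterDir);
// for 5.0+ archives src is the UUID-named folder, so keep that name while copying
OldIndexUtils.copyIndex(logger, src, src.getFileName().toString(),
    randomFrom(nodeEnvironment.nodePaths()).indicesPath);
IndexFolderUpgrader.upgradeIndicesIfNeeded(Settings.EMPTY, nodeEnvironment);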