This commit moves the keystore CLI into its own project so that its test dependencies can be isolated from the rest of the server project.
parent 0fa1060ca4
commit 3a24fe9d37

@@ -267,6 +267,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
       from { project(':distribution:tools:plugin-cli').jar }
       from { project(':distribution:tools:plugin-cli').configurations.runtime }
     }
+    into('tools/keystore-cli') {
+      from { project(':distribution:tools:keystore-cli').jar }
+    }
     if (oss == false) {
       into('tools/security-cli') {
         from { project(':x-pack:plugin:security:cli').jar }

@@ -1,5 +1,6 @@
 #!/bin/bash

 ES_MAIN_CLASS=org.elasticsearch.common.settings.KeyStoreCli \
+  ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/keystore-cli \
   "`dirname "$0"`"/elasticsearch-cli \
   "$@"

@@ -4,6 +4,7 @@ setlocal enabledelayedexpansion
 setlocal enableextensions

 set ES_MAIN_CLASS=org.elasticsearch.common.settings.KeyStoreCli
+set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/keystore-cli
 call "%~dp0elasticsearch-cli.bat" ^
   %%* ^
   || goto exit

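Both wrapper scripts delegate to the shared elasticsearch-cli launcher: they name the tool's entry point via ES_MAIN_CLASS and point ES_ADDITIONAL_CLASSPATH_DIRECTORIES at lib/tools/keystore-cli, which is where the distribution hunk above now copies the keystore-cli jar. As a rough sketch of that hand-off only (this is not the real elasticsearch-cli implementation; the class and variable names below are invented):

import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

// Hypothetical sketch only: shows how a launcher *could* consume the two
// environment variables set by the wrapper scripts. The real elasticsearch-cli
// script assembles the JVM classpath itself; nothing below is taken from it.
public final class CliLauncherSketch {
    public static void main(String[] args) throws Exception {
        // e.g. org.elasticsearch.common.settings.KeyStoreCli
        String mainClass = System.getenv("ES_MAIN_CLASS");
        // e.g. lib/tools/keystore-cli, relative to the Elasticsearch home directory
        String extraDir = System.getenv("ES_ADDITIONAL_CLASSPATH_DIRECTORIES");
        if (mainClass == null) {
            throw new IllegalStateException("ES_MAIN_CLASS must be set by the wrapper script");
        }

        List<URL> classpath = new ArrayList<>();
        if (extraDir != null) {
            try (Stream<Path> entries = Files.list(Paths.get(extraDir))) {
                for (Path jar : (Iterable<Path>) entries.filter(p -> p.toString().endsWith(".jar"))::iterator) {
                    classpath.add(jar.toUri().toURL());
                }
            }
        }

        // Load the tool's main class from the extra directory (the parent
        // classloader would already hold the common lib jars) and invoke main.
        try (URLClassLoader loader = new URLClassLoader(classpath.toArray(new URL[0]),
                CliLauncherSketch.class.getClassLoader())) {
            Method main = Class.forName(mainClass, true, loader).getMethod("main", String[].class);
            main.invoke(null, (Object) args);
        }
    }
}
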
@@ -0,0 +1,28 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+apply plugin: 'elasticsearch.build'
+
+dependencies {
+  compileOnly project(":server")
+  compileOnly project(":libs:elasticsearch-cli")
+  testCompile project(":test:framework")
+  testCompile 'com.google.jimfs:jimfs:1.1'
+  testCompile 'com.google.guava:guava:18.0'
+}

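The new project declares jimfs and guava as testCompile-only dependencies, so they never reach the :server compile or runtime classpath. Jimfs supplies an in-memory java.nio.file.FileSystem, which is what lets the keystore tests create and inspect keystore files without touching disk; a minimal standalone sketch of that idea (the class name and paths are invented for illustration, not taken from the test framework):

import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;

import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;

// Minimal sketch of why jimfs is useful as a test-only dependency: it provides
// an in-memory FileSystem, so keystore files can be created and read without
// touching the real disk.
public class InMemoryConfigDirSketch {
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = Jimfs.newFileSystem(Configuration.unix())) {
            Path configDir = fs.getPath("/config");
            Files.createDirectories(configDir);
            Path keystore = configDir.resolve("elasticsearch.keystore");
            Files.write(keystore, new byte[] { 1, 2, 3 }); // stand-in bytes, not a real keystore
            System.out.println(Files.size(keystore));      // prints 3; nothing touched the real disk
        }
    }
}
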
@@ -42,8 +42,10 @@ class AddFileKeyStoreCommand extends BaseKeyStoreCommand {

     AddFileKeyStoreCommand() {
         super("Add a file setting to the keystore", false);
-        this.forceOption = parser.acceptsAll(Arrays.asList("f", "force"),
-            "Overwrite existing setting without prompting, creating keystore if necessary");
+        this.forceOption = parser.acceptsAll(
+            Arrays.asList("f", "force"),
+            "Overwrite existing setting without prompting, creating keystore if necessary"
+        );
         // jopt simple has issue with multiple non options, so we just get one set of them here
         // and convert to File when necessary
         // see https://github.com/jopt-simple/jopt-simple/issues/103

@@ -48,8 +48,10 @@ class AddStringKeyStoreCommand extends BaseKeyStoreCommand {
     AddStringKeyStoreCommand() {
         super("Add a string settings to the keystore", false);
         this.stdinOption = parser.acceptsAll(Arrays.asList("x", "stdin"), "Read setting values from stdin");
-        this.forceOption = parser.acceptsAll(Arrays.asList("f", "force"),
-            "Overwrite existing setting without prompting, creating keystore if necessary");
+        this.forceOption = parser.acceptsAll(
+            Arrays.asList("f", "force"),
+            "Overwrite existing setting without prompting, creating keystore if necessary"
+        );
         this.arguments = parser.nonOptions("setting names");
     }

@@ -45,8 +45,7 @@ class CreateKeyStoreCommand extends KeyStoreAwareCommand {

     @Override
     protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
-        try (SecureString password = options.has(passwordOption) ?
-            readPassword(terminal, true) : new SecureString(new char[0])) {
+        try (SecureString password = options.has(passwordOption) ? readPassword(terminal, true) : new SecureString(new char[0])) {
             Path keystoreFile = KeyStoreWrapper.keystorePath(env.configFile());
             if (Files.exists(keystoreFile)) {
                 if (terminal.promptYesNo("An elasticsearch keystore already exists. Overwrite?", false) == false) {

@@ -19,7 +19,6 @@

 package org.elasticsearch.common.settings;

 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

@@ -90,16 +90,22 @@ public class BootstrapTests extends ESTestCase {
     public void testPassphraseTooLong() throws Exception {
         byte[] source = "hellohello!\n".getBytes(StandardCharsets.UTF_8);
         try (InputStream stream = new ByteArrayInputStream(source)) {
-            expectThrows(RuntimeException.class, "Password exceeded maximum length of 10",
-                () -> Bootstrap.readPassphrase(stream, MAX_PASSPHRASE_LENGTH));
+            expectThrows(
+                RuntimeException.class,
+                "Password exceeded maximum length of 10",
+                () -> Bootstrap.readPassphrase(stream, MAX_PASSPHRASE_LENGTH)
+            );
         }
     }

     public void testNoPassPhraseProvided() throws Exception {
         byte[] source = "\r\n".getBytes(StandardCharsets.UTF_8);
         try (InputStream stream = new ByteArrayInputStream(source)) {
-            expectThrows(RuntimeException.class, "Keystore passphrase required but none provided.",
-                () -> Bootstrap.readPassphrase(stream, MAX_PASSPHRASE_LENGTH));
+            expectThrows(
+                RuntimeException.class,
+                "Keystore passphrase required but none provided.",
+                () -> Bootstrap.readPassphrase(stream, MAX_PASSPHRASE_LENGTH)
+            );
         }
     }

@@ -256,9 +256,7 @@ public class AddStringKeyStoreCommandTests extends KeyStoreCommandTestCase {
         final String key = randomAlphaOfLength(4) + '@' + randomAlphaOfLength(4);
         final UserException e = expectThrows(UserException.class, () -> execute(key));
         final String exceptionString = "Setting name [" + key + "] does not match the allowed setting name pattern [[A-Za-z0-9_\\-.]+]";
-        assertThat(
-            e,
-            hasToString(containsString(exceptionString)));
+        assertThat(e, hasToString(containsString(exceptionString)));
     }

     public void testAddToUnprotectedKeystore() throws Exception {

@@ -114,8 +114,12 @@ public abstract class KeyStoreCommandTestCase extends CommandTestCase {
         }
         int eof = input.read();
         if (eof != -1) {
-            fail("Found extra bytes in file stream from keystore, expected " + expectedBytes.length +
-                " bytes but found 0x" + Integer.toHexString(eof));
+            fail(
+                "Found extra bytes in file stream from keystore, expected "
+                    + expectedBytes.length
+                    + " bytes but found 0x"
+                    + Integer.toHexString(eof)
+            );
         }
     }

@@ -137,8 +137,10 @@ public class KeyStoreWrapperTests extends ESTestCase {
         keystore.close();

         assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey()));
-        final IllegalStateException exception = expectThrows(IllegalStateException.class,
-            () -> keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()));
+        final IllegalStateException exception = expectThrows(
+            IllegalStateException.class,
+            () -> keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey())
+        );
         assertThat(exception.getMessage(), containsString("closed"));
     }

@@ -306,9 +308,13 @@ public class KeyStoreWrapperTests extends ESTestCase {
         output.write(secret_value);
     }

-    private void possiblyAlterEncryptedBytes(IndexOutput indexOutput, byte[] salt, byte[] iv, byte[] encryptedBytes, int
-        truncEncryptedDataLength)
-        throws Exception {
+    private void possiblyAlterEncryptedBytes(
+        IndexOutput indexOutput,
+        byte[] salt,
+        byte[] iv,
+        byte[] encryptedBytes,
+        int truncEncryptedDataLength
+    ) throws Exception {
         indexOutput.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length);
         indexOutput.writeInt(salt.length);
         indexOutput.writeBytes(salt, salt.length);

@@ -451,8 +457,10 @@ public class KeyStoreWrapperTests extends ESTestCase {
     public void testLegacyV3() throws GeneralSecurityException, IOException {
         final Path configDir = createTempDir();
         final Path keystore = configDir.resolve("elasticsearch.keystore");
-        try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v3-elasticsearch.keystore");
-            OutputStream os = Files.newOutputStream(keystore)) {
+        try (
+            InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v3-elasticsearch.keystore");
+            OutputStream os = Files.newOutputStream(keystore)
+        ) {
             final byte[] buffer = new byte[4096];
             int readBytes;
             while ((readBytes = is.read(buffer)) > 0) {

@@ -50,8 +50,10 @@ public class UpgradeKeyStoreCommandTests extends KeyStoreCommandTestCase {

     public void testKeystoreUpgrade() throws Exception {
         final Path keystore = KeyStoreWrapper.keystorePath(env.configFile());
-        try (InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v3-elasticsearch.keystore");
-            OutputStream os = Files.newOutputStream(keystore)) {
+        try (
+            InputStream is = KeyStoreWrapperTests.class.getResourceAsStream("/format-v3-elasticsearch.keystore");
+            OutputStream os = Files.newOutputStream(keystore)
+        ) {
             final byte[] buffer = new byte[4096];
             int read;
             while ((read = is.read(buffer, 0, buffer.length)) >= 0) {

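Both upgrade-related tests above copy the bundled format-v3-elasticsearch.keystore fixture into the temporary config directory with a manual buffer loop. For reference, java.nio can perform the same copy in a single call; a standalone sketch under that assumption (only the resource name is taken from the diff, everything else is illustrative):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

// Illustrative only: Files.copy(InputStream, Path, ...) is equivalent to the
// manual read/write loop used in the tests above. The temp directory and class
// name are invented.
public class ResourceCopySketch {
    public static void main(String[] args) throws Exception {
        Path keystore = Files.createTempDirectory("config").resolve("elasticsearch.keystore");
        try (InputStream is = ResourceCopySketch.class.getResourceAsStream("/format-v3-elasticsearch.keystore")) {
            if (is == null) {
                throw new IllegalStateException("fixture not on the classpath"); // it is a test resource in the real repo
            }
            Files.copy(is, keystore, StandardCopyOption.REPLACE_EXISTING);
        }
        System.out.println(Files.size(keystore));
    }
}
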
@@ -132,7 +132,6 @@ dependencies {
         exclude group: 'org.elasticsearch', module: 'server'
     }

     testCompile 'com.google.jimfs:jimfs:1.1'
     testCompile 'com.google.guava:guava:18.0'
 }

@@ -19,7 +19,6 @@

 package org.elasticsearch.cluster.metadata;

-import com.google.common.collect.ImmutableList;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexResponse.IndexResult;

@@ -455,7 +454,8 @@ public class MetaDataIndexStateServiceTests extends ESTestCase {

         final Snapshot snapshot = new Snapshot(randomAlphaOfLength(10), new SnapshotId(randomAlphaOfLength(5), randomAlphaOfLength(5)));
         final RestoreInProgress.Entry entry =
-            new RestoreInProgress.Entry("_uuid", snapshot, RestoreInProgress.State.INIT, ImmutableList.of(index), shardsBuilder.build());
+            new RestoreInProgress.Entry("_uuid", snapshot, RestoreInProgress.State.INIT,
+                Collections.singletonList(index), shardsBuilder.build());
         return ClusterState.builder(newState)
             .putCustom(RestoreInProgress.TYPE, new RestoreInProgress.Builder().add(entry).build())
             .build();

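This hunk and the test changes that follow share one pattern: Guava collection helpers are replaced with JDK equivalents (ImmutableList.of with Collections.singletonList, Lists.newArrayList with Arrays.asList, ImmutableMap.of with a HashMap wrapped in Collections.unmodifiableMap), so Guava no longer has to sit on the server test classpath. A compact, self-contained illustration of those substitutions (the variable names and values are placeholders, not taken from the diff):

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// The Guava-to-JDK substitutions used across the test changes in this commit.
public class GuavaToJdkSketch {
    public static void main(String[] args) {
        // ImmutableList.of(index)         ->  Collections.singletonList(index)
        List<String> single = Collections.singletonList("index-1");

        // Lists.newArrayList("f1", "f2")  ->  Arrays.asList("f1", "f2")
        List<String> fields = Arrays.asList("f1", "f2");

        // ImmutableMap.of("k", "v")       ->  HashMap wrapped in Collections.unmodifiableMap
        Map<String, String> mappers = new HashMap<>();
        mappers.put("k", "v");
        Map<String, String> readOnly = Collections.unmodifiableMap(mappers);

        System.out.println(single + " " + fields + " " + readOnly);
    }
}

Arrays.asList returns a fixed-size view rather than a mutable ArrayList, a difference these tests evidently do not rely on.
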
@@ -18,7 +18,6 @@
  */
 package org.elasticsearch.index.mapper;

-import com.google.common.collect.ImmutableMap;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.mapper.FieldMapper.CopyTo;

@@ -27,6 +26,8 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.AfterClass;

+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;

 import static java.util.Collections.emptyMap;

@@ -38,11 +39,7 @@ public class ObjectMapperMergeTests extends ESTestCase {
     private static FieldMapper barFieldMapper = createTextFieldMapper("bar");
     private static FieldMapper bazFieldMapper = createTextFieldMapper("baz");

-    private static RootObjectMapper rootObjectMapper = createRootObjectMapper(
-        "type1", true, ImmutableMap.of(
-            "disabled", createObjectMapper("disabled", false, emptyMap()),
-            "foo", createObjectMapper("foo", true, ImmutableMap.of(
-                "bar", barFieldMapper))));
+    private static RootObjectMapper rootObjectMapper = createMapping(false, true, true, false);

     @AfterClass
     public static void cleanupReferences() {

@@ -51,14 +48,24 @@ public class ObjectMapperMergeTests extends ESTestCase {
         rootObjectMapper = null;
     }

+    private static RootObjectMapper createMapping(boolean disabledFieldEnabled, boolean fooFieldEnabled,
+                                                  boolean includeBarField, boolean includeBazField) {
+        Map<String, Mapper> mappers = new HashMap<>();
+        mappers.put("disabled", createObjectMapper("disabled", disabledFieldEnabled, emptyMap()));
+        Map<String, Mapper> fooMappers = new HashMap<>();
+        if (includeBarField) {
+            fooMappers.put("bar", barFieldMapper);
+        }
+        if (includeBazField) {
+            fooMappers.put("baz", bazFieldMapper);
+        }
+        mappers.put("foo", createObjectMapper("foo", fooFieldEnabled, Collections.unmodifiableMap(fooMappers)));
+        return createRootObjectMapper("type1", true, Collections.unmodifiableMap(mappers));
+    }
+
     public void testMerge() {
         // GIVEN an enriched mapping with "baz" new field
-        ObjectMapper mergeWith = createRootObjectMapper(
-            "type1", true, ImmutableMap.of(
-                "disabled", createObjectMapper("disabled", false, emptyMap()),
-                "foo", createObjectMapper("foo", true, ImmutableMap.of(
-                    "bar", barFieldMapper,
-                    "baz", bazFieldMapper))));
+        ObjectMapper mergeWith = createMapping(false, true, true, true);

         // WHEN merging mappings
         final ObjectMapper merged = rootObjectMapper.merge(mergeWith);

@@ -71,10 +78,7 @@ public class ObjectMapperMergeTests extends ESTestCase {

     public void testMergeWhenDisablingField() {
         // GIVEN a mapping with "foo" field disabled
-        ObjectMapper mergeWith = createRootObjectMapper(
-            "type1", true, ImmutableMap.of(
-                "disabled", createObjectMapper("disabled", false, emptyMap()),
-                "foo", createObjectMapper("foo", false, emptyMap())));
+        ObjectMapper mergeWith = createMapping(false, false, false, false);

         // WHEN merging mappings
         // THEN a MapperException is thrown with an excepted message

@@ -84,11 +88,7 @@ public class ObjectMapperMergeTests extends ESTestCase {

     public void testMergeWhenEnablingField() {
         // GIVEN a mapping with "disabled" field enabled
-        ObjectMapper mergeWith = createRootObjectMapper(
-            "type1", true, ImmutableMap.of(
-                "disabled", createObjectMapper("disabled", true, emptyMap()),
-                "foo", createObjectMapper("foo", true, ImmutableMap.of(
-                    "bar", barFieldMapper))));
+        ObjectMapper mergeWith = createMapping(true, true, true, false);

         // WHEN merging mappings
         // THEN a MapperException is thrown with an excepted message

@@ -19,8 +19,6 @@

 package org.elasticsearch.search.aggregations.bucket.composite;

-import com.google.common.collect.Lists;
-
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.time.DateFormatter;

@@ -278,11 +276,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyBiggerFieldName() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field3", "field2"),
+            Arrays.asList("field3", "field2"),
             new Comparable[]{1, 2}
         );
         assertThat(key1.compareTo(key2), lessThan(0));

@@ -290,11 +288,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeySmallerFieldName() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field3", "field2"),
+            Arrays.asList("field3", "field2"),
             new Comparable[]{1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
         assertThat(key1.compareTo(key2), greaterThan(0));

@@ -302,11 +300,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyBiggerValue() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field3", "field2"),
+            Arrays.asList("field3", "field2"),
             new Comparable[]{2, 3}
         );
         assertThat(key1.compareTo(key2), lessThan(0));

@@ -314,11 +312,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeySmallerValue() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field3", "field2"),
+            Arrays.asList("field3", "field2"),
             new Comparable[]{1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{2, 3}
         );
         assertThat(key1.compareTo(key2), greaterThan(0));

@@ -326,11 +324,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyNullValueIsSmaller1() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{null, 20}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
         assertThat(key1.compareTo(key2), lessThan(0));

@@ -338,11 +336,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyNullValueIsSmaller2() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{null, 20}
         );
         assertThat(key1.compareTo(key2), greaterThan(0));

@@ -350,29 +348,29 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyMoreFieldsIsGreater() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );
-        InternalComposite.ArrayMap key2 = createMap(Lists.newArrayList("field1", "field2", "field3"),new Comparable[]{1, 2, null});
+        InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2", "field3"),new Comparable[]{1, 2, null});
         assertThat(key1.compareTo(key2), lessThan(0));
     }

     public void testCompareCompositeKeyLessFieldsIsLesser() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2", "field3"),
+            Arrays.asList("field1", "field2", "field3"),
             new Comparable[]{1, 2, null}
         );
-        InternalComposite.ArrayMap key2 = createMap(Lists.newArrayList("field1", "field2"),new Comparable[]{1, 2});
+        InternalComposite.ArrayMap key2 = createMap(Arrays.asList("field1", "field2"),new Comparable[]{1, 2});
         assertThat(key1.compareTo(key2), greaterThan(0));
     }

     public void testCompareCompositeKeyEqual() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2", "field3"),
+            Arrays.asList("field1", "field2", "field3"),
             new Comparable[]{null, 1, 2}
         );
         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2", "field3"),
+            Arrays.asList("field1", "field2", "field3"),
             new Comparable[]{null, 1, 2}
         );
         assertThat(key1.compareTo(key1), equalTo(0));

@@ -385,12 +383,12 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCa

     public void testCompareCompositeKeyValuesHaveDifferentTypes() {
         InternalComposite.ArrayMap key1 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{1, 2}
         );

         InternalComposite.ArrayMap key2 = createMap(
-            Lists.newArrayList("field1", "field2"),
+            Arrays.asList("field1", "field2"),
             new Comparable[]{"1", 2}
         );

@@ -57,6 +57,7 @@ List projects = [
   'distribution:tools:java-version-checker',
   'distribution:tools:launchers',
   'distribution:tools:plugin-cli',
+  'distribution:tools:keystore-cli',
   'server',
   'server:cli',
   'test:framework',