Use expectThrows() instead of try-catch blocks for testing expected exceptions

Christoph Büscher 2016-10-17 13:05:03 +02:00
parent 3d3dd7185d
commit c63c5fa3f2
19 changed files with 305 additions and 608 deletions
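
For readers skimming the diff below: the change applied across these files collapses the try/fail/catch idiom into a single expectThrows() call, which runs a lambda, fails the test if nothing (or the wrong exception type) is thrown, and returns the caught exception so assertions on the message can follow. A minimal sketch of the before and after shapes, assuming the expectThrows() helper that ESTestCase inherits from the Lucene test framework; the class name, the Integer.parseInt() call, and the message are illustrative, not taken from the commit:

    import org.elasticsearch.test.ESTestCase;

    import static org.hamcrest.Matchers.containsString;

    public class ExpectThrowsPatternTests extends ESTestCase {

        // Old style: call the method under test, fail() if it returns, assert on the exception in the catch block.
        public void testParseFailureOldStyle() {
            try {
                Integer.parseInt("not-a-number");
                fail("Expected NumberFormatException");
            } catch (NumberFormatException e) {
                assertThat(e.getMessage(), containsString("not-a-number"));
            }
        }

        // New style: expectThrows() wraps only the failing call and hands back the typed exception.
        public void testParseFailureNewStyle() {
            NumberFormatException e = expectThrows(NumberFormatException.class, () -> Integer.parseInt("not-a-number"));
            assertThat(e.getMessage(), containsString("not-a-number"));
        }
    }

The one-liner also makes it impossible to forget the fail() call, which is the usual bug in the old pattern.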


@@ -97,41 +97,24 @@ public class VersionTests extends ESTestCase {
}
public void testTooLongVersionFromString() {
try {
Version.fromString("1.0.0.1.3");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0.0.1.3"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}
}
public void testTooShortVersionFromString() {
try {
Version.fromString("1.0");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}
}
public void testWrongVersionFromString() {
try {
Version.fromString("WRONG.VERSION");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("WRONG.VERSION"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}
}
public void testVersionNoPresentInSettings() {
try {
Version.indexCreated(Settings.builder().build());
fail("Expected IllegalArgumentException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> Version.indexCreated(Settings.builder().build()));
assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
}
}
public void testIndexCreatedVersion() {
// an actual index has a IndexMetaData.SETTING_INDEX_UUID


@@ -137,6 +137,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
THREAD_POOL = new TestThreadPool(TransportInstanceSingleOperationActionTests.class.getSimpleName());
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -156,6 +157,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
);
}
@Override
@After
public void tearDown() throws Exception {
super.tearDown();


@@ -226,13 +226,9 @@ public class UpdateRequestTests extends ESTestCase {
// Related to issue #15822
public void testInvalidBodyThrowsParseException() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1");
try {
request.fromXContent(new byte[] { (byte) '"' });
fail("Should have thrown a ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> request.fromXContent(new byte[] { (byte) '"' }));
assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
}
}
// Related to issue 15338
public void testFieldsParsing() throws Exception {


@@ -177,43 +177,31 @@ public class DateMathExpressionResolverTests extends ESTestCase {
}
public void testExpressionInvalidUnescaped() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("invalid character at position ["));
}
}
public void testExpressionInvalidDateMathFormat() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}
}
public void testExpressionInvalidEmptyDateMathFormat() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("missing date format"));
}
}
public void testExpressionInvalidOpenEnded() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}
}
}


@@ -28,73 +28,45 @@ import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
public class TableTests extends ESTestCase {
public void testFailOnStartRowWithoutHeader() {
Table table = new Table();
try {
table.startRow();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.startRow());
assertThat(e.getMessage(), is("no headers added..."));
}
}
public void testFailOnEndHeadersWithoutStart() {
Table table = new Table();
try {
table.endHeaders();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.endHeaders());
assertThat(e.getMessage(), is("no headers added..."));
}
}
public void testFailOnAddCellWithoutHeader() {
Table table = new Table();
try {
table.addCell("error");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
assertThat(e.getMessage(), is("no block started..."));
}
}
public void testFailOnAddCellWithoutRow() {
Table table = this.getTableWithHeaders();
try {
table.addCell("error");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
assertThat(e.getMessage(), is("no block started..."));
}
}
public void testFailOnEndRowWithoutStart() {
Table table = this.getTableWithHeaders();
try {
table.endRow();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
assertThat(e.getMessage(), is("no row started..."));
}
}
public void testFailOnLessCellsThanDeclared() {
Table table = this.getTableWithHeaders();
table.startRow();
table.addCell("foo");
try {
table.endRow(true);
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
assertThat(e.getMessage(), is("mismatch on number of cells 1 in a row compared to header 2"));
}
}
public void testOnLessCellsThanDeclaredUnchecked() {
Table table = this.getTableWithHeaders();
table.startRow();
@@ -107,15 +79,10 @@ public class TableTests extends ESTestCase {
table.startRow();
table.addCell("foo");
table.addCell("bar");
try {
table.addCell("foobar");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("foobar"));
assertThat(e.getMessage(), is("can't add more cells to a row than the header"));
}
}
public void testSimple() {
Table table = this.getTableWithHeaders();
table.startRow();


@@ -19,12 +19,6 @@
package org.elasticsearch.common.geo;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.PointImpl;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Polygon;
@@ -35,6 +29,12 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.test.ESTestCase;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.PointImpl;
import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiLineString;
import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiPolygon;
@@ -183,18 +183,14 @@ public class ShapeBuilderTests extends ESTestCase {
}
public void testPolygonSelfIntersection() {
try {
ShapeBuilders.newPolygon(new CoordinatesBuilder()
PolygonBuilder newPolygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
.coordinate(-40.0, 50.0)
.coordinate(40.0, 50.0)
.coordinate(-40.0, -50.0)
.coordinate(40.0, -50.0).close())
.build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
.coordinate(40.0, -50.0).close());
Exception e = expectThrows(InvalidShapeException.class, () -> newPolygon.build());
assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0"));
}
}
public void testGeoCircle() {
double earthCircumference = 40075016.69;
@@ -550,13 +546,9 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(179, -10)
.coordinate(164, 0)
));
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}
}
public void testBoundaryShapeWithTangentialHole() {
// test a shape with one tangential (shared) vertex for each hole (should pass)
@@ -602,13 +594,9 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(176, -10)
.coordinate(-177, 10)
));
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}
}
/**
* Test an enveloping polygon around the max mercator bounds
@@ -659,11 +647,7 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(-176, 4)
.coordinate(180, 0)
);
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("duplicate consecutive coordinates at: ("));
}
}
}
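
A detail worth noting in the ShapeBuilderTests hunks above: only the call expected to throw goes inside the expectThrows() lambda, so the builder construction is hoisted into a local variable first, and that local has to be final or effectively final for the lambda to capture it. A small sketch of the same shape, assuming a made-up builder rather than the real PolygonBuilder API:

    import java.util.ArrayList;
    import java.util.List;

    import org.elasticsearch.test.ESTestCase;

    import static org.hamcrest.Matchers.containsString;

    public class LambdaCaptureSketchTests extends ESTestCase {

        // Illustrative builder whose build() step is the part expected to fail.
        static class RowBuilder {
            private final List<String> cells = new ArrayList<>();

            RowBuilder cell(String value) {
                cells.add(value);
                return this;
            }

            String build() {
                if (cells.isEmpty()) {
                    throw new IllegalStateException("cannot build an empty row");
                }
                return String.join(",", cells);
            }
        }

        public void testOnlyTheFailingStepIsInsideTheLambda() {
            // Construct outside the lambda; the reference is effectively final, so the lambda
            // may capture it, and the assertion targets build() alone rather than construction.
            RowBuilder emptyRow = new RowBuilder();
            Exception e = expectThrows(IllegalStateException.class, () -> emptyRow.build());
            assertThat(e.getMessage(), containsString("empty row"));
        }
    }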


@@ -97,14 +97,7 @@ public class BytesStreamsTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput();
// bulk-write with wrong args
try {
out.writeBytes(new byte[]{}, 0, 1);
fail("expected IllegalArgumentException: length > (size-offset)");
}
catch (IllegalArgumentException iax1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[]{}, 0, 1));
out.close();
}
@@ -333,19 +326,22 @@ public class BytesStreamsTests extends ESTestCase {
}
public void testNamedWriteable() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
assertEquals(namedWriteableIn, namedWriteableOut);
assertEquals(0, in.available());
}
}
}
public void testNamedWriteableList() throws IOException {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
@@ -367,60 +363,62 @@ public class BytesStreamsTests extends ESTestCase {
}
public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {
TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
out.writeNamedWriteable(testNamedWriteable);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
try {
in.readNamedWriteable(BaseNamedWriteable.class);
fail("Expected UnsupportedOperationException");
} catch (UnsupportedOperationException e) {
Exception e = expectThrows(UnsupportedOperationException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), is("can't read named writeable from StreamInput"));
}
}
public void testNamedWriteableReaderReturnsNull() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
}
}
}
public void testOptionalWriteableReaderReturnsNull() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
}
}
public void testWriteableReaderReturnsWrongName() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) ->
new TestNamedWriteable(in) {
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
Collections.singletonList(new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME,
(StreamInput in) -> new TestNamedWriteable(in) {
@Override
public String getWriteableName() {
return "intentionally-broken";
}
})
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
})));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(),
endsWith(" claims to have a different name [intentionally-broken] than it was read from [test-named-writeable]."));
}
}
}
public void testWriteStreamableList() throws IOException {
final int size = randomIntBetween(0, 5);
@@ -551,32 +549,13 @@ public class BytesStreamsTests extends ESTestCase {
assertEquals(-1, out.position());
// writing a single byte must fail
try {
out.writeByte((byte)0);
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> out.writeByte((byte)0));
// writing in bulk must fail
try {
out.writeBytes(new byte[0], 0, 0);
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[0], 0, 0));
// toByteArray() must fail
try {
BytesReference.toBytes(out.bytes());
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> BytesReference.toBytes(out.bytes()));
}
// create & fill byte[] with randomized data
@@ -587,16 +566,15 @@ public class BytesStreamsTests extends ESTestCase {
}
public void testReadWriteGeoPoint() throws IOException {
{
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {;
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGenericValue(geoPoint);
StreamInput wrap = out.bytes().streamInput();
GeoPoint point = (GeoPoint) wrap.readGenericValue();
assertEquals(point, geoPoint);
}
{
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGeoPoint(geoPoint);
StreamInput wrap = out.bytes().streamInput();
@@ -640,32 +618,34 @@ public class BytesStreamsTests extends ESTestCase {
assertNotEquals(mapKeys, reverseMapKeys);
BytesStreamOutput output = new BytesStreamOutput();
BytesStreamOutput reverseMapOutput = new BytesStreamOutput();
try (BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput reverseMapOutput = new BytesStreamOutput()) {
output.writeMapWithConsistentOrder(map);
reverseMapOutput.writeMapWithConsistentOrder(reverseMap);
assertEquals(output.bytes(), reverseMapOutput.bytes());
}
}
public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException {
Map<String, String> streamOutMap =
randomMap(new HashMap<>(), randomIntBetween(2, 20),
() -> randomAsciiOfLength(5),
() -> randomAsciiOfLength(5));
BytesStreamOutput streamOut = new BytesStreamOutput();
try (BytesStreamOutput streamOut = new BytesStreamOutput()) {
streamOut.writeMapWithConsistentOrder(streamOutMap);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes()));
Map<String, Object> streamInMap = in.readMap();
assertEquals(streamOutMap, streamInMap);
}
}
public void testWriteMapWithConsistentOrderWithLinkedHashMapShouldThrowAssertError() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
try (BytesStreamOutput output = new BytesStreamOutput()) {
Map<String, Object> map = new LinkedHashMap<>();
Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map));
assertEquals(AssertionError.class, e.getClass());
}
}
private static <K, V> Map<K, V> randomMap(Map<K, V> map, int size, Supplier<K> keyGenerator, Supplier<V> valueGenerator) {
IntStream.range(0, size).forEach(i -> map.put(keyGenerator.get(), valueGenerator.get()));
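
Beyond the expectThrows() conversions, the BytesStreamsTests hunks above also move the stream setup into try-with-resources blocks so the outputs and inputs are closed even when an assertion or an expected exception ends the test early. A minimal sketch of that shape, using plain JDK streams as stand-ins for BytesStreamOutput and StreamInput:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.elasticsearch.test.ESTestCase;

    public class TryWithResourcesSketchTests extends ESTestCase {

        public void testRoundTripClosesStreamsAutomatically() throws IOException {
            byte[] bytes;
            // Resources declared in the try header are closed in reverse order when the block
            // exits, whether it completes normally or an assertion fails halfway through.
            try (ByteArrayOutputStream buffer = new ByteArrayOutputStream();
                    DataOutputStream out = new DataOutputStream(buffer)) {
                out.writeUTF("payload");
                out.flush();
                bytes = buffer.toByteArray();
            }

            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes))) {
                assertEquals("payload", in.readUTF());
                assertEquals(0, in.available());
            }
        }
    }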


@@ -123,49 +123,31 @@ public class ByteSizeValueTests extends ESTestCase {
}
public void testFailOnMissingUnits() {
try {
ByteSizeValue.parseBytesSizeValue("23", "test");
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23", "test"));
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
}
}
public void testFailOnUnknownUnits() {
try {
ByteSizeValue.parseBytesSizeValue("23jw", "test");
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23jw", "test"));
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
}
}
public void testFailOnEmptyParsing() {
try {
assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb")));
assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
}
}
public void testFailOnEmptyNumberParsing() {
try {
assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class,
() -> assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b")));
assertThat(e.getMessage(), containsString("failed to parse [g]"));
}
}
public void testNoDotsAllowed() {
try {
ByteSizeValue.parseBytesSizeValue("42b.", null, "test");
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("42b.", null, "test"));
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
}
}
public void testCompareEquality() {
long firstRandom = randomPositiveLong();


@@ -87,7 +87,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
currentNodes = DiscoveryNodes.builder();
currentNodes.masterNodeId("b").add(new DiscoveryNode("b", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT));
;
// version isn't taken into account, so randomize it to ensure this.
if (randomBoolean()) {
currentState.version(2);


@@ -41,7 +41,6 @@ import org.hamcrest.CoreMatchers;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.lang.NumberFormatException;
import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@@ -256,86 +255,61 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", -91).field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
}
try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 91).field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
}
try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", -181).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
}
try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", 181).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
}
try {
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", "-").field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
try {
e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", "-").endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
try {
e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", "-").field("lon", "-").endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
}
public void testNoValidateLegacyLatLonValues() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
@@ -743,92 +717,84 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
try {
String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
} catch (MapperParsingException e) {
String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject().endObject().endObject()
.string();
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
}
}
try {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
}
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
}
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
}
// test deprecated normalize
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
}
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
}
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
}
}
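
One more idiom in the GeoPointFieldMapperTests hunk above: the converted cases sit in plain { ... } blocks instead of the old try blocks, so each case keeps its own xContentBuilder, mapping string, and caught exception scoped locally without implying any exception handling. A tiny illustration of that scoping, with placeholder strings:

    public class BlockScopingSketch {

        public static void main(String[] args) {
            // Each bare block has its own locals, so the same names can be reused per case
            // without leaking into the rest of the method.
            {
                String mapping = "{\"validate\": true}";
                System.out.println("case 1: " + mapping);
            }
            {
                String mapping = "{\"normalize\": true}";
                System.out.println("case 2: " + mapping);
            }
            // Referring to `mapping` here would not compile: the locals end with their blocks.
        }
    }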
@@ -844,20 +810,17 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.field("geohash", false).endObject().endObject().endObject().endObject().string();
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
fail();
} catch (IllegalArgumentException e) {
Exception e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
}
// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String stage2MappingCorrect = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
mapperService.merge("type", new CompressedXContent(stage2MappingCorrect), MapperService.MergeReason.MAPPING_UPDATE, false);
}
public void testLegacyGeoHashSearch() throws Exception {


@@ -22,13 +22,8 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
@@ -366,22 +361,16 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
// explicitly setting limit to 0 prevents nested fields
try {
Exception e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));
}
// setting limit to 1 with 2 nested fields fails
try {
e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));
}
MapperService mapperService = createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 2)
.build()).mapperService();
@@ -391,12 +380,9 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
// adding new fields from different type is not ok
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3")
.field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject().string();
try {
mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded"));
}
// do not check nested fields limit if mapping is not updated
createIndex("test5", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())


@@ -30,20 +30,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -57,7 +48,6 @@ import java.util.Collection;
import java.util.LinkedHashMap;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -212,13 +202,10 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.field("default", (String) null)
.endObject()
.endObject().endObject();
try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}
}
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception {
@@ -229,13 +216,10 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();
try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}
}
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
public void testTimestampDefaultAndIgnore() throws Exception {
@@ -247,13 +231,10 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();
try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
} catch (TimestampParsingException e) {
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
}
}
// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
public void testTimestampMissingShouldNotFail() throws Exception {


@@ -30,8 +30,8 @@ import org.elasticsearch.test.geo.RandomGeoGenerator;
import java.io.IOException;
import static org.hamcrest.Matchers.is;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.hamcrest.Matchers.is;
public class GeoPointParsingTests extends ESTestCase {
static double TOLERANCE = 1E-5;
@@ -112,14 +112,9 @@ public class GeoPointParsingTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
}
}
public void testInvalidPointLatHashMix() throws IOException {
XContentBuilder content = JsonXContent.contentBuilder();
@@ -130,13 +125,9 @@ public class GeoPointParsingTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
}
}
public void testInvalidPointLonHashMix() throws IOException {
XContentBuilder content = JsonXContent.contentBuilder();
@@ -147,13 +138,9 @@ public class GeoPointParsingTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
}
}
public void testInvalidField() throws IOException {
XContentBuilder content = JsonXContent.contentBuilder();
@@ -164,13 +151,9 @@ public class GeoPointParsingTests extends ESTestCase {
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
}
}
private static XContentParser objectLatLon(double lat, double lon) throws IOException {
XContentBuilder content = JsonXContent.contentBuilder();


@@ -19,8 +19,6 @@
package org.elasticsearch.index.search.geo;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.apache.lucene.spatial.prefix.tree.Cell;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
@@ -33,6 +31,8 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.test.ESTestCase;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import java.io.IOException;
@@ -439,65 +439,45 @@ public class GeoUtilsTests extends ESTestCase {
BytesReference jsonBytes = jsonBuilder().startObject().field("geohash", 1.0).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), containsString("geohash must be a string"));
}
}
public void testParseGeoPointLatNoLon() throws IOException {
double lat = 0.0;
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field [lon] missing"));
}
}
public void testParseGeoPointLonNoLat() throws IOException {
double lon = 0.0;
BytesReference jsonBytes = jsonBuilder().startObject().field("lon", lon).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field [lat] missing"));
}
}
public void testParseGeoPointLonWrongType() throws IOException {
double lat = 0.0;
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("longitude must be a number"));
}
}
public void testParseGeoPointLatWrongType() throws IOException {
double lon = 0.0;
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("latitude must be a number"));
}
}
public void testParseGeoPointExtraField() throws IOException {
double lat = 0.0;
@@ -505,13 +485,9 @@ public class GeoUtilsTests extends ESTestCase {
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject().bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
}
}
public void testParseGeoPointLonLatGeoHash() throws IOException {
double lat = 0.0;
@@ -521,13 +497,9 @@ public class GeoUtilsTests extends ESTestCase {
.bytes();
XContentParser parser = XContentHelper.createParser(jsonBytes);
parser.nextToken();
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash"));
}
}
public void testParseGeoPointArrayTooManyValues() throws IOException {
double lat = 0.0;
@@ -539,13 +511,9 @@ public class GeoUtilsTests extends ESTestCase {
while (parser.currentToken() != Token.START_ARRAY) {
parser.nextToken();
}
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("only two values allowed"));
}
}
public void testParseGeoPointArrayWrongType() throws IOException {
double lat = 0.0;
@@ -555,13 +523,9 @@ public class GeoUtilsTests extends ESTestCase {
while (parser.currentToken() != Token.START_ARRAY) {
parser.nextToken();
}
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("numeric value expected"));
}
}
public void testParseGeoPointInvalidType() throws IOException {
BytesReference jsonBytes = jsonBuilder().startObject().field("foo", 5).endObject().bytes();
@@ -569,13 +533,9 @@ public class GeoUtilsTests extends ESTestCase {
while (parser.currentToken() != Token.VALUE_NUMBER) {
parser.nextToken();
}
try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("geo_point expected"));
}
}
public void testPrefixTreeCellSizes() {
assertThat(GeoUtils.EARTH_SEMI_MAJOR_AXIS, equalTo(DistanceUtils.EARTH_EQUATORIAL_RADIUS_KM * 1000));


@@ -470,8 +470,6 @@ public class IndexShardTests extends IndexShardTestCase {
throw new RuntimeException(ex);
}
}
;
};
thread[i].start();
}
@@ -1172,6 +1170,7 @@ public class IndexShardTests extends IndexShardTestCase {
throw new RuntimeException("boom");
}
@Override
public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
return searcher;
}


@@ -64,9 +64,8 @@ public class ShardPathTests extends ESTestCase {
assumeTrue("This test tests multi data.path but we only got one", paths.length > 1);
int id = randomIntBetween(1, 10);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), paths);
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () ->
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
assertThat(e.getMessage(), containsString("more than one shard state found"));
}
}
@@ -81,9 +80,8 @@ public class ShardPathTests extends ESTestCase {
Path path = randomFrom(paths);
int id = randomIntBetween(1, 10);
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), path);
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
Exception e = expectThrows(IllegalStateException.class, () ->
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
assertThat(e.getMessage(), containsString("expected: foobar on shard path"));
}
}
@@ -91,13 +89,9 @@ public class ShardPathTests extends ESTestCase {
public void testIllegalCustomDataPath() {
Index index = new Index("foo", "foo");
final Path path = createTempDir().resolve(index.getUUID()).resolve("0");
try {
new ShardPath(true, path, path, new ShardId(index, 0));
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
Exception e = expectThrows(IllegalArgumentException.class, () -> new ShardPath(true, path, path, new ShardId(index, 0)));
assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
}
}
public void testValidCtor() {
Index index = new Index("foo", "foo");


@@ -45,7 +45,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
@@ -444,21 +443,15 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
public void testAllMissingStrict() throws Exception {
createIndex("test1");
try {
expectThrows(IndexNotFoundException.class, () ->
client().prepareSearch("test2")
.setQuery(matchAllQuery())
.execute().actionGet();
fail("Exception should have been thrown.");
} catch (IndexNotFoundException e) {
}
.execute().actionGet());
try {
expectThrows(IndexNotFoundException.class, () ->
client().prepareSearch("test2","test3")
.setQuery(matchAllQuery())
.execute().actionGet();
fail("Exception should have been thrown.");
} catch (IndexNotFoundException e) {
}
.execute().actionGet());
//you should still be able to run empty searches without things blowing up
client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();


@@ -70,36 +70,27 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
public void testSimpleCloseMissingIndex() {
Client client = client();
Exception e = expectThrows(IndexNotFoundException.class, () ->
client.admin().indices().prepareClose("test1").execute().actionGet());
assertThat(e.getMessage(), is("no such index"));
}
public void testSimpleOpenMissingIndex() {
Client client = client();
Exception e = expectThrows(IndexNotFoundException.class, () ->
client.admin().indices().prepareOpen("test1").execute().actionGet());
assertThat(e.getMessage(), is("no such index"));
}
public void testCloseOneMissingIndex() {
Client client = client();
createIndex("test1");
ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
assertThat(healthResponse.isTimedOut(), equalTo(false));
Exception e = expectThrows(IndexNotFoundException.class, () ->
client.admin().indices().prepareClose("test1", "test2").execute().actionGet());
assertThat(e.getMessage(), is("no such index"));
}
public void testCloseOneMissingIndexIgnoreMissing() {
Client client = client();
@ -117,13 +108,10 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
createIndex("test1");
ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
assertThat(healthResponse.isTimedOut(), equalTo(false));
Exception e = expectThrows(IndexNotFoundException.class, () ->
client.admin().indices().prepareOpen("test1", "test2").execute().actionGet());
assertThat(e.getMessage(), is("no such index"));
}
public void testOpenOneMissingIndexIgnoreMissing() {
Client client = client();
@ -204,43 +192,31 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
public void testCloseNoIndex() {
Client client = client();
Exception e = expectThrows(ActionRequestValidationException.class, () ->
client.admin().indices().prepareClose().execute().actionGet());
assertThat(e.getMessage(), containsString("index is missing"));
}
public void testCloseNullIndex() {
Client client = client();
Exception e = expectThrows(ActionRequestValidationException.class, () ->
client.admin().indices().prepareClose((String[])null).execute().actionGet());
assertThat(e.getMessage(), containsString("index is missing"));
}
public void testOpenNoIndex() {
Client client = client();
Exception e = expectThrows(ActionRequestValidationException.class, () ->
client.admin().indices().prepareOpen().execute().actionGet());
assertThat(e.getMessage(), containsString("index is missing"));
}
public void testOpenNullIndex() {
Client client = client();
Exception e = expectThrows(ActionRequestValidationException.class, () ->
client.admin().indices().prepareOpen((String[])null).execute().actionGet());
assertThat(e.getMessage(), containsString("index is missing"));
}
public void testOpenAlreadyOpenedIndex() {
Client client = client();

View File

@ -535,14 +535,11 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get();
assertHitCount(searchResponse, 1L);
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch()
.setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get());
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.toString(), containsString("unit [D] not supported for date math"));
}
// Issue #7880
public void testDateRangeInQueryStringWithTimeZone_7880() {
@ -776,12 +773,7 @@ public class SearchQueryIT extends ESIntegTestCase {
searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get();
assertHitCount(searchResponse, 1L);
assertFirstHit(searchResponse, hasId("2"));
expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
}
public void testMultiMatchQuery() throws Exception {
@ -1777,16 +1769,12 @@ public class SearchQueryIT extends ESIntegTestCase {
refresh();
//has_child fails if executed on "simple" index
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
() -> client().prepareSearch("simple").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get());
assertThat(e.shardFailures().length, greaterThan(0));
for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]"));
}
//has_child doesn't get parsed for "simple" index
SearchResponse searchResponse = client().prepareSearch("related", "simple")
@ -1983,14 +1971,10 @@ public class SearchQueryIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
// When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation
Exception e = expectThrows(SearchPhaseExecutionException.class, () ->
client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00"))
.get());
searchResponse = client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00"))
@ -2005,14 +1989,10 @@ public class SearchQueryIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getId(), is("4"));
// A Range Filter on a numeric field with a TimeZone should raise an exception
e = expectThrows(SearchPhaseExecutionException.class, () ->
client().prepareSearch("test")
.setQuery(QueryBuilders.rangeQuery("num").from("0").to("4").timeZone("-01:00"))
.get());
}
public void testSearchEmptyDoc() {