Use expectThrows() instead of try-catch blocks for testing expected exceptions
This commit is contained in:
parent 3d3dd7185d
commit c63c5fa3f2
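The pattern applied throughout this commit replaces hand-rolled try/fail/catch blocks with ESTestCase#expectThrows, which fails the test itself if nothing is thrown and returns the caught exception so the test can assert on its message. A minimal before/after sketch; the Version.fromString call and expected message are taken from the first hunk below, the rest is illustrative:

    // Before: catch the exception by hand and fail() if it is never thrown
    public void testTooLongVersionFromString() {
        try {
            Version.fromString("1.0.0.1.3");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
        }
    }

    // After: expectThrows hands back the thrown exception for further assertions
    public void testTooLongVersionFromString() {
        Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0.0.1.3"));
        assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
    }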
@@ -97,40 +97,23 @@ public class VersionTests extends ESTestCase {
}

public void testTooLongVersionFromString() {
try {
Version.fromString("1.0.0.1.3");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}
Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0.0.1.3"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}

public void testTooShortVersionFromString() {
try {
Version.fromString("1.0");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}

Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}

public void testWrongVersionFromString() {
try {
Version.fromString("WRONG.VERSION");
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}
Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("WRONG.VERSION"));
assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
}

public void testVersionNoPresentInSettings() {
try {
Version.indexCreated(Settings.builder().build());
fail("Expected IllegalArgumentException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
}
Exception e = expectThrows(IllegalStateException.class, () -> Version.indexCreated(Settings.builder().build()));
assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
}

public void testIndexCreatedVersion() {
@@ -137,6 +137,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
THREAD_POOL = new TestThreadPool(TransportInstanceSingleOperationActionTests.class.getSimpleName());
}

@Override
@Before
public void setUp() throws Exception {
super.setUp();
@@ -156,6 +157,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
);
}

@Override
@After
public void tearDown() throws Exception {
super.tearDown();
@@ -226,12 +226,8 @@ public class UpdateRequestTests extends ESTestCase {
// Related to issue #15822
public void testInvalidBodyThrowsParseException() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1");
try {
request.fromXContent(new byte[] { (byte) '"' });
fail("Should have thrown a ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
}
Exception e = expectThrows(ElasticsearchParseException.class, () -> request.fromXContent(new byte[] { (byte) '"' }));
assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
}

// Related to issue 15338
@@ -177,43 +177,31 @@ public class DateMathExpressionResolverTests extends ESTestCase {
}

public void testExpressionInvalidUnescaped() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("invalid character at position ["));
}
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("invalid character at position ["));
}

public void testExpressionInvalidDateMathFormat() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}

public void testExpressionInvalidEmptyDateMathFormat() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("missing date format"));
}
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("missing date format"));
}

public void testExpressionInvalidOpenEnded() throws Exception {
try {
expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>"));
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}
Exception e = expectThrows(ElasticsearchParseException.class,
() -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")));
assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
}

}
@@ -28,71 +28,43 @@ import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

public class TableTests extends ESTestCase {

public void testFailOnStartRowWithoutHeader() {
Table table = new Table();
try {
table.startRow();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("no headers added..."));
}
Exception e = expectThrows(IllegalStateException.class, () -> table.startRow());
assertThat(e.getMessage(), is("no headers added..."));
}

public void testFailOnEndHeadersWithoutStart() {
Table table = new Table();
try {
table.endHeaders();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("no headers added..."));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.endHeaders());
assertThat(e.getMessage(), is("no headers added..."));
}

public void testFailOnAddCellWithoutHeader() {
Table table = new Table();
try {
table.addCell("error");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("no block started..."));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
assertThat(e.getMessage(), is("no block started..."));
}

public void testFailOnAddCellWithoutRow() {
Table table = this.getTableWithHeaders();
try {
table.addCell("error");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("no block started..."));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
assertThat(e.getMessage(), is("no block started..."));
}

public void testFailOnEndRowWithoutStart() {
Table table = this.getTableWithHeaders();
try {
table.endRow();
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("no row started..."));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
assertThat(e.getMessage(), is("no row started..."));
}

public void testFailOnLessCellsThanDeclared() {
Table table = this.getTableWithHeaders();
table.startRow();
table.addCell("foo");
try {
table.endRow(true);
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("mismatch on number of cells 1 in a row compared to header 2"));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
assertThat(e.getMessage(), is("mismatch on number of cells 1 in a row compared to header 2"));
}

public void testOnLessCellsThanDeclaredUnchecked() {
@@ -107,13 +79,8 @@ public class TableTests extends ESTestCase {
table.startRow();
table.addCell("foo");
table.addCell("bar");
try {
table.addCell("foobar");
fail("Expected IllegalStateException");
} catch (IllegalStateException e) {
assertThat(e.getMessage(), is("can't add more cells to a row than the header"));
}

Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("foobar"));
assertThat(e.getMessage(), is("can't add more cells to a row than the header"));
}

public void testSimple() {
@@ -19,12 +19,6 @@

package org.elasticsearch.common.geo;

import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.PointImpl;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Polygon;
@@ -35,6 +29,12 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.test.ESTestCase;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.impl.PointImpl;

import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiLineString;
import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiPolygon;
@@ -183,17 +183,13 @@ public class ShapeBuilderTests extends ESTestCase {
}

public void testPolygonSelfIntersection() {
try {
ShapeBuilders.newPolygon(new CoordinatesBuilder()
PolygonBuilder newPolygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
.coordinate(-40.0, 50.0)
.coordinate(40.0, 50.0)
.coordinate(-40.0, -50.0)
.coordinate(40.0, -50.0).close())
.build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0"));
}
.coordinate(40.0, -50.0).close());
Exception e = expectThrows(InvalidShapeException.class, () -> newPolygon.build());
assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0"));
}

public void testGeoCircle() {
@@ -550,12 +546,8 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(179, -10)
.coordinate(164, 0)
));
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}

public void testBoundaryShapeWithTangentialHole() {
@@ -602,12 +594,8 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(176, -10)
.coordinate(-177, 10)
));
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
}

/**
@@ -659,11 +647,7 @@ public class ShapeBuilderTests extends ESTestCase {
.coordinate(-176, 4)
.coordinate(180, 0)
);
try {
builder.close().build();
fail("Expected InvalidShapeException");
} catch (InvalidShapeException e) {
assertThat(e.getMessage(), containsString("duplicate consecutive coordinates at: ("));
}
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
assertThat(e.getMessage(), containsString("duplicate consecutive coordinates at: ("));
}
}
@@ -97,14 +97,7 @@ public class BytesStreamsTests extends ESTestCase {
BytesStreamOutput out = new BytesStreamOutput();

// bulk-write with wrong args
try {
out.writeBytes(new byte[]{}, 0, 1);
fail("expected IllegalArgumentException: length > (size-offset)");
}
catch (IllegalArgumentException iax1) {
// expected
}

expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[]{}, 0, 1));
out.close();
}
@@ -333,18 +326,21 @@ public class BytesStreamsTests extends ESTestCase {
}

public void testNamedWriteable() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length);
BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
assertEquals(namedWriteableIn, namedWriteableOut);
assertEquals(0, in.available());
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());

try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
assertEquals(namedWriteableIn, namedWriteableOut);
assertEquals(0, in.available());
}
}
}

public void testNamedWriteableList() throws IOException {
@@ -367,59 +363,61 @@ public class BytesStreamsTests extends ESTestCase {
}

public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
out.writeNamedWriteable(testNamedWriteable);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
try {
in.readNamedWriteable(BaseNamedWriteable.class);
fail("Expected UnsupportedOperationException");
} catch (UnsupportedOperationException e) {
try (BytesStreamOutput out = new BytesStreamOutput()) {
TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
out.writeNamedWriteable(testNamedWriteable);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
Exception e = expectThrows(UnsupportedOperationException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), is("can't read named writeable from StreamInput"));
}
}

public void testNamedWriteableReaderReturnsNull() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length);
IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
}
}
}

public void testOptionalWriteableReaderReturnsNull() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
}
}

public void testWriteableReaderReturnsWrongName() throws IOException {
BytesStreamOutput out = new BytesStreamOutput();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) ->
new TestNamedWriteable(in) {
@Override
public String getWriteableName() {
return "intentionally-broken";
}
})
));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
assertEquals(in.available(), bytes.length);
AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(),
endsWith(" claims to have a different name [intentionally-broken] than it was read from [test-named-writeable]."));
try (BytesStreamOutput out = new BytesStreamOutput()) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
Collections.singletonList(new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME,
(StreamInput in) -> new TestNamedWriteable(in) {
@Override
public String getWriteableName() {
return "intentionally-broken";
}
})));
TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
randomAsciiOfLengthBetween(1, 10));
out.writeNamedWriteable(namedWriteableIn);
byte[] bytes = BytesReference.toBytes(out.bytes());
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
assertEquals(in.available(), bytes.length);
AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
assertThat(e.getMessage(),
endsWith(" claims to have a different name [intentionally-broken] than it was read from [test-named-writeable]."));
}
}
}

public void testWriteStreamableList() throws IOException {
@@ -551,32 +549,13 @@ public class BytesStreamsTests extends ESTestCase {
assertEquals(-1, out.position());

// writing a single byte must fail
try {
out.writeByte((byte)0);
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> out.writeByte((byte)0));

// writing in bulk must fail
try {
out.writeBytes(new byte[0], 0, 0);
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}
expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[0], 0, 0));

// toByteArray() must fail
try {
BytesReference.toBytes(out.bytes());
fail("expected IllegalStateException: stream closed");
}
catch (IllegalStateException iex1) {
// expected
}

expectThrows(IllegalArgumentException.class, () -> BytesReference.toBytes(out.bytes()));
}

// create & fill byte[] with randomized data
@@ -587,16 +566,15 @@ public class BytesStreamsTests extends ESTestCase {
}

public void testReadWriteGeoPoint() throws IOException {
{
BytesStreamOutput out = new BytesStreamOutput();
try (BytesStreamOutput out = new BytesStreamOutput()) {;
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGenericValue(geoPoint);
StreamInput wrap = out.bytes().streamInput();
GeoPoint point = (GeoPoint) wrap.readGenericValue();
assertEquals(point, geoPoint);
}
{
BytesStreamOutput out = new BytesStreamOutput();

try (BytesStreamOutput out = new BytesStreamOutput()) {
GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
out.writeGeoPoint(geoPoint);
StreamInput wrap = out.bytes().streamInput();
@@ -640,12 +618,12 @@ public class BytesStreamsTests extends ESTestCase {

assertNotEquals(mapKeys, reverseMapKeys);

BytesStreamOutput output = new BytesStreamOutput();
BytesStreamOutput reverseMapOutput = new BytesStreamOutput();
output.writeMapWithConsistentOrder(map);
reverseMapOutput.writeMapWithConsistentOrder(reverseMap);
try (BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput reverseMapOutput = new BytesStreamOutput()) {
output.writeMapWithConsistentOrder(map);
reverseMapOutput.writeMapWithConsistentOrder(reverseMap);

assertEquals(output.bytes(), reverseMapOutput.bytes());
assertEquals(output.bytes(), reverseMapOutput.bytes());
}
}

public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException {
@@ -653,18 +631,20 @@ public class BytesStreamsTests extends ESTestCase {
randomMap(new HashMap<>(), randomIntBetween(2, 20),
() -> randomAsciiOfLength(5),
() -> randomAsciiOfLength(5));
BytesStreamOutput streamOut = new BytesStreamOutput();
streamOut.writeMapWithConsistentOrder(streamOutMap);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes()));
Map<String, Object> streamInMap = in.readMap();
assertEquals(streamOutMap, streamInMap);
try (BytesStreamOutput streamOut = new BytesStreamOutput()) {
streamOut.writeMapWithConsistentOrder(streamOutMap);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes()));
Map<String, Object> streamInMap = in.readMap();
assertEquals(streamOutMap, streamInMap);
}
}

public void testWriteMapWithConsistentOrderWithLinkedHashMapShouldThrowAssertError() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
Map<String, Object> map = new LinkedHashMap<>();
Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map));
assertEquals(AssertionError.class, e.getClass());
try (BytesStreamOutput output = new BytesStreamOutput()) {
Map<String, Object> map = new LinkedHashMap<>();
Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map));
assertEquals(AssertionError.class, e.getClass());
}
}

private static <K, V> Map<K, V> randomMap(Map<K, V> map, int size, Supplier<K> keyGenerator, Supplier<V> valueGenerator) {
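Besides the expectThrows conversion, the BytesStreamsTests hunks above also wrap the streams in try-with-resources blocks so they are always closed. A minimal sketch of that shape, reusing only the class names from the diff (the registry and writeable setup are assumed to exist as in the tests):

    try (BytesStreamOutput out = new BytesStreamOutput()) {
        out.writeNamedWriteable(namedWriteableIn);
        byte[] bytes = BytesReference.toBytes(out.bytes());
        try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
            assertEquals(in.available(), bytes.length);
        }
    }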
|
||||
|
|
|
@ -123,48 +123,30 @@ public class ByteSizeValueTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testFailOnMissingUnits() {
|
||||
try {
|
||||
ByteSizeValue.parseBytesSizeValue("23", "test");
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23", "test"));
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
|
||||
public void testFailOnUnknownUnits() {
|
||||
try {
|
||||
ByteSizeValue.parseBytesSizeValue("23jw", "test");
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23jw", "test"));
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
|
||||
public void testFailOnEmptyParsing() {
|
||||
try {
|
||||
assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb"));
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class,
|
||||
() -> assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb")));
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
|
||||
}
|
||||
|
||||
public void testFailOnEmptyNumberParsing() {
|
||||
try {
|
||||
assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b"));
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse [g]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class,
|
||||
() -> assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b")));
|
||||
assertThat(e.getMessage(), containsString("failed to parse [g]"));
|
||||
}
|
||||
|
||||
public void testNoDotsAllowed() {
|
||||
try {
|
||||
ByteSizeValue.parseBytesSizeValue("42b.", null, "test");
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("42b.", null, "test"));
|
||||
assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
|
||||
}
|
||||
|
||||
public void testCompareEquality() {
|
||||
|
|
|
@@ -87,7 +87,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {

currentNodes = DiscoveryNodes.builder();
currentNodes.masterNodeId("b").add(new DiscoveryNode("b", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT));
;

// version isn't taken into account, so randomize it to ensure this.
if (randomBoolean()) {
currentState.version(2);
@@ -41,7 +41,6 @@ import org.hamcrest.CoreMatchers;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.lang.NumberFormatException;

import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@@ -256,85 +255,60 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());

try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", -91).field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));

}

try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 91).field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));

}

try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", -181).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));

}

try {
expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", 181).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
.bytes()));

}

try {
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", "-").field("lon", 1.3).endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));

try {
e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", "-").endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));

try {
e = expectThrows(MapperParsingException.class, () ->
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", "-").field("lon", "-").endObject()
.endObject()
.bytes());
fail();
} catch (MapperParsingException e) {
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}
.bytes()));
assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
}

public void testNoValidateLegacyLatLonValues() throws Exception {
@@ -743,92 +717,84 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}

if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
try {
String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
} catch (MapperParsingException e) {
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
}
String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject().endObject().endObject()
.string();
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
}

try {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
}

try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
}

try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(validateMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(validateMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
}

// test deprecated normalize
try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
}

try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
}

try {
{
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
}
String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string();
parser.parse("type", new CompressedXContent(normalizeMapping));
fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
} catch (MapperParsingException e) {
Exception e = expectThrows(MapperParsingException.class, () ->
parser.parse("type", new CompressedXContent(normalizeMapping)));
assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
}
}
@@ -844,20 +810,17 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.field("geohash", false).endObject().endObject().endObject().endObject().string();
try {
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
fail();
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
}
Exception e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));

// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
String stage2MappingCorrect = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
mapperService.merge("type", new CompressedXContent(stage2MappingCorrect), MapperService.MergeReason.MAPPING_UPDATE, false);
}

public void testLegacyGeoHashSearch() throws Exception {
@@ -22,13 +22,8 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;

import java.io.IOException;
@@ -366,22 +361,16 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);

// explicitly setting limit to 0 prevents nested fields
try {
Exception e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));
}
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));

// setting limit to 1 with 2 nested fields fails
try {
e = expectThrows(IllegalArgumentException.class, () ->
createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));
}
.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));

MapperService mapperService = createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 2)
.build()).mapperService();
@@ -391,12 +380,9 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
// adding new fields from different type is not ok
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3")
.field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject().string();
try {
mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false);
fail("Expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded"));
}
e = expectThrows(IllegalArgumentException.class, () ->
mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false));
assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded"));

// do not check nested fields limit if mapping is not updated
createIndex("test5", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
@@ -30,20 +30,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TimestampFieldMapper;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -57,7 +48,6 @@ import java.util.Collection;
import java.util.LinkedHashMap;

import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.VersionUtils.randomVersion;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -212,12 +202,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.field("default", (String) null)
.endObject()
.endObject().endObject();
try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}

// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -229,12 +216,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();

try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
}

// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -247,12 +231,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.endObject().endObject();

try {
createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
} catch (TimestampParsingException e) {
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
}
TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
}

// Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -30,8 +30,8 @@ import org.elasticsearch.test.geo.RandomGeoGenerator;

import java.io.IOException;

import static org.hamcrest.Matchers.is;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.hamcrest.Matchers.is;

public class GeoPointParsingTests extends ESTestCase {
static double TOLERANCE = 1E-5;
@@ -112,13 +112,8 @@ public class GeoPointParsingTests extends ESTestCase {

XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
parser.nextToken();

try {
GeoUtils.parseGeoPoint(parser);
fail("Expected ElasticsearchParseException");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
}
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
}

public void testInvalidPointLatHashMix() throws IOException {
@ -130,12 +125,8 @@ public class GeoPointParsingTests extends ESTestCase {
|
|||
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
|
||||
}
|
||||
|
||||
public void testInvalidPointLonHashMix() throws IOException {
|
||||
|
@ -147,12 +138,8 @@ public class GeoPointParsingTests extends ESTestCase {
|
|||
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
|
||||
}
|
||||
|
||||
public void testInvalidField() throws IOException {
|
||||
|
@ -164,12 +151,8 @@ public class GeoPointParsingTests extends ESTestCase {
|
|||
XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
|
||||
parser.nextToken();
|
||||
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
|
||||
}
|
||||
|
||||
private static XContentParser objectLatLon(double lat, double lon) throws IOException {
|
||||
|
|
|
@ -19,8 +19,6 @@
|
|||
|
||||
package org.elasticsearch.index.search.geo;
|
||||
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.apache.lucene.spatial.prefix.tree.Cell;
|
||||
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
|
@ -33,6 +31,8 @@ import org.elasticsearch.common.xcontent.XContentHelper;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -439,12 +439,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("geohash", 1.0).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("geohash must be a string"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), containsString("geohash must be a string"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointLatNoLon() throws IOException {
|
||||
|
@ -452,12 +448,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field [lon] missing"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field [lon] missing"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointLonNoLat() throws IOException {
|
||||
|
@ -465,12 +457,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("lon", lon).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field [lat] missing"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field [lat] missing"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointLonWrongType() throws IOException {
|
||||
|
@ -478,12 +466,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("longitude must be a number"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("longitude must be a number"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointLatWrongType() throws IOException {
|
||||
|
@ -491,12 +475,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("latitude must be a number"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("latitude must be a number"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointExtraField() throws IOException {
|
||||
|
@ -505,12 +485,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject().bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointLonLatGeoHash() throws IOException {
|
||||
|
@ -521,12 +497,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
.bytes();
|
||||
XContentParser parser = XContentHelper.createParser(jsonBytes);
|
||||
parser.nextToken();
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointArrayTooManyValues() throws IOException {
|
||||
|
@ -539,12 +511,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
while (parser.currentToken() != Token.START_ARRAY) {
|
||||
parser.nextToken();
|
||||
}
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("only two values allowed"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("only two values allowed"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointArrayWrongType() throws IOException {
|
||||
|
@ -555,12 +523,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
while (parser.currentToken() != Token.START_ARRAY) {
|
||||
parser.nextToken();
|
||||
}
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("numeric value expected"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("numeric value expected"));
|
||||
}
|
||||
|
||||
public void testParseGeoPointInvalidType() throws IOException {
|
||||
|
@ -569,12 +533,8 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
while (parser.currentToken() != Token.VALUE_NUMBER) {
|
||||
parser.nextToken();
|
||||
}
|
||||
try {
|
||||
GeoUtils.parseGeoPoint(parser);
|
||||
fail("Expected ElasticsearchParseException");
|
||||
} catch (ElasticsearchParseException e) {
|
||||
assertThat(e.getMessage(), is("geo_point expected"));
|
||||
}
|
||||
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
|
||||
assertThat(e.getMessage(), is("geo_point expected"));
|
||||
}
|
||||
|
||||
public void testPrefixTreeCellSizes() {
|
||||
|
|
|
@ -470,8 +470,6 @@ public class IndexShardTests extends IndexShardTestCase {
|
|||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
;
|
||||
};
|
||||
thread[i].start();
|
||||
}
|
||||
|
@ -1172,6 +1170,7 @@ public class IndexShardTests extends IndexShardTestCase {
|
|||
throw new RuntimeException("boom");
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
|
||||
return searcher;
|
||||
}
|
||||
|
|
|
@ -64,9 +64,8 @@ public class ShardPathTests extends ESTestCase {
|
|||
assumeTrue("This test tests multi data.path but we only got one", paths.length > 1);
|
||||
int id = randomIntBetween(1, 10);
|
||||
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), paths);
|
||||
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
|
||||
fail("Expected IllegalStateException");
|
||||
} catch (IllegalStateException e) {
|
||||
Exception e = expectThrows(IllegalStateException.class, () ->
|
||||
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
|
||||
assertThat(e.getMessage(), containsString("more than one shard state found"));
|
||||
}
|
||||
}
|
||||
|
@ -81,9 +80,8 @@ public class ShardPathTests extends ESTestCase {
|
|||
Path path = randomFrom(paths);
|
||||
int id = randomIntBetween(1, 10);
|
||||
ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), path);
|
||||
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
|
||||
fail("Expected IllegalStateException");
|
||||
} catch (IllegalStateException e) {
|
||||
Exception e = expectThrows(IllegalStateException.class, () ->
|
||||
ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
|
||||
assertThat(e.getMessage(), containsString("expected: foobar on shard path"));
|
||||
}
|
||||
}
|
||||
|
@ -91,12 +89,8 @@ public class ShardPathTests extends ESTestCase {
|
|||
public void testIllegalCustomDataPath() {
|
||||
Index index = new Index("foo", "foo");
|
||||
final Path path = createTempDir().resolve(index.getUUID()).resolve("0");
|
||||
try {
|
||||
new ShardPath(true, path, path, new ShardId(index, 0));
|
||||
fail("Expected IllegalArgumentException");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
|
||||
}
|
||||
Exception e = expectThrows(IllegalArgumentException.class, () -> new ShardPath(true, path, path, new ShardId(index, 0)));
|
||||
assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
|
||||
}
|
||||
|
||||
public void testValidCtor() {
|
||||
|
|
|
@ -45,7 +45,6 @@ import org.elasticsearch.common.Strings;
|
|||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
@ -444,21 +443,15 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
|
|||
|
||||
public void testAllMissingStrict() throws Exception {
|
||||
createIndex("test1");
|
||||
try {
|
||||
expectThrows(IndexNotFoundException.class, () ->
|
||||
client().prepareSearch("test2")
|
||||
.setQuery(matchAllQuery())
|
||||
.execute().actionGet();
|
||||
fail("Exception should have been thrown.");
|
||||
} catch (IndexNotFoundException e) {
|
||||
}
|
||||
.execute().actionGet());
|
||||
|
||||
try {
|
||||
expectThrows(IndexNotFoundException.class, () ->
|
||||
client().prepareSearch("test2","test3")
|
||||
.setQuery(matchAllQuery())
|
||||
.execute().actionGet();
|
||||
fail("Exception should have been thrown.");
|
||||
} catch (IndexNotFoundException e) {
|
||||
}
|
||||
.execute().actionGet());
|
||||
|
||||
//you should still be able to run empty searches without things blowing up
|
||||
client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
|
||||
|
|
|
@ -70,22 +70,16 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
|
|||
|
||||
public void testSimpleCloseMissingIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareClose("test1").execute().actionGet();
|
||||
fail("Expected IndexNotFoundException");
|
||||
} catch (IndexNotFoundException e) {
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
Exception e = expectThrows(IndexNotFoundException.class, () ->
|
||||
client.admin().indices().prepareClose("test1").execute().actionGet());
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
|
||||
public void testSimpleOpenMissingIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareOpen("test1").execute().actionGet();
|
||||
fail("Expected IndexNotFoundException");
|
||||
} catch (IndexNotFoundException e) {
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
Exception e = expectThrows(IndexNotFoundException.class, () ->
|
||||
client.admin().indices().prepareOpen("test1").execute().actionGet());
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
|
||||
public void testCloseOneMissingIndex() {
|
||||
|
@ -93,12 +87,9 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
|
|||
createIndex("test1");
|
||||
ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
|
||||
assertThat(healthResponse.isTimedOut(), equalTo(false));
|
||||
try {
|
||||
client.admin().indices().prepareClose("test1", "test2").execute().actionGet();
|
||||
fail("Expected IndexNotFoundException");
|
||||
} catch (IndexNotFoundException e) {
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
Exception e = expectThrows(IndexNotFoundException.class, () ->
|
||||
client.admin().indices().prepareClose("test1", "test2").execute().actionGet());
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
|
||||
public void testCloseOneMissingIndexIgnoreMissing() {
|
||||
|
@ -117,12 +108,9 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
|
|||
createIndex("test1");
|
||||
ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
|
||||
assertThat(healthResponse.isTimedOut(), equalTo(false));
|
||||
try {
|
||||
client.admin().indices().prepareOpen("test1", "test2").execute().actionGet();
|
||||
fail("Expected IndexNotFoundException");
|
||||
} catch (IndexNotFoundException e) {
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
Exception e = expectThrows(IndexNotFoundException.class, () ->
|
||||
client.admin().indices().prepareOpen("test1", "test2").execute().actionGet());
|
||||
assertThat(e.getMessage(), is("no such index"));
|
||||
}
|
||||
|
||||
public void testOpenOneMissingIndexIgnoreMissing() {
|
||||
|
@ -204,42 +192,30 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
|
|||
|
||||
public void testCloseNoIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareClose().execute().actionGet();
|
||||
fail("Expected ActionRequestValidationException");
|
||||
} catch (ActionRequestValidationException e) {
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
Exception e = expectThrows(ActionRequestValidationException.class, () ->
|
||||
client.admin().indices().prepareClose().execute().actionGet());
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
|
||||
public void testCloseNullIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareClose((String[])null).execute().actionGet();
|
||||
fail("Expected ActionRequestValidationException");
|
||||
} catch (ActionRequestValidationException e) {
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
Exception e = expectThrows(ActionRequestValidationException.class, () ->
|
||||
client.admin().indices().prepareClose((String[])null).execute().actionGet());
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
|
||||
public void testOpenNoIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareOpen().execute().actionGet();
|
||||
fail("Expected ActionRequestValidationException");
|
||||
} catch (ActionRequestValidationException e) {
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
Exception e = expectThrows(ActionRequestValidationException.class, () ->
|
||||
client.admin().indices().prepareOpen().execute().actionGet());
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
|
||||
public void testOpenNullIndex() {
|
||||
Client client = client();
|
||||
try {
|
||||
client.admin().indices().prepareOpen((String[])null).execute().actionGet();
|
||||
fail("Expected ActionRequestValidationException");
|
||||
} catch (ActionRequestValidationException e) {
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
Exception e = expectThrows(ActionRequestValidationException.class, () ->
|
||||
client.admin().indices().prepareOpen((String[])null).execute().actionGet());
|
||||
assertThat(e.getMessage(), containsString("index is missing"));
|
||||
}
|
||||
|
||||
public void testOpenAlreadyOpenedIndex() {
|
||||
|
|
|
@ -535,13 +535,10 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
searchResponse = client().prepareSearch().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
|
||||
try {
|
||||
client().prepareSearch().setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get();
|
||||
fail("expected SearchPhaseExecutionException (total failure)");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.toString(), containsString("unit [D] not supported for date math"));
|
||||
}
|
||||
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch()
|
||||
.setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get());
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
assertThat(e.toString(), containsString("unit [D] not supported for date math"));
|
||||
}
|
||||
|
||||
// Issue #7880
|
||||
|
@ -776,12 +773,7 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertFirstHit(searchResponse, hasId("2"));
|
||||
try {
|
||||
client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get();
|
||||
fail("SearchPhaseExecutionException should have been thrown");
|
||||
} catch (SearchPhaseExecutionException ex) {
|
||||
// number format exception
|
||||
}
|
||||
expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
|
||||
}
|
||||
|
||||
public void testMultiMatchQuery() throws Exception {
|
||||
|
@ -1777,15 +1769,11 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
//has_child fails if executed on "simple" index
|
||||
try {
|
||||
client().prepareSearch("simple")
|
||||
.setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
|
||||
fail("Should have failed as has_child query can only be executed against parent-child types");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.shardFailures().length, greaterThan(0));
|
||||
for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
|
||||
assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]"));
|
||||
}
|
||||
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
|
||||
() -> client().prepareSearch("simple").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get());
|
||||
assertThat(e.shardFailures().length, greaterThan(0));
|
||||
for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
|
||||
assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]"));
|
||||
}
|
||||
|
||||
//has_child doesn't get parsed for "simple" index
|
||||
|
@ -1983,14 +1971,10 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));
|
||||
|
||||
// When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation
|
||||
try {
|
||||
Exception e = expectThrows(SearchPhaseExecutionException.class, () ->
|
||||
client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00"))
|
||||
.get();
|
||||
fail("A Range Filter using ms since epoch with a TimeZone should raise a ParsingException");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
// We expect it
|
||||
}
|
||||
.get());
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00"))
|
||||
|
@ -2005,14 +1989,10 @@ public class SearchQueryIT extends ESIntegTestCase {
|
|||
assertThat(searchResponse.getHits().getAt(0).getId(), is("4"));
|
||||
|
||||
// A Range Filter on a numeric field with a TimeZone should raise an exception
|
||||
try {
|
||||
e = expectThrows(SearchPhaseExecutionException.class, () ->
|
||||
client().prepareSearch("test")
|
||||
.setQuery(QueryBuilders.rangeQuery("num").from("0").to("4").timeZone("-01:00"))
|
||||
.get();
|
||||
fail("A Range Filter on a numeric field with a TimeZone should raise a ParsingException");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
// We expect it
|
||||
}
|
||||
.get());
|
||||
}
|
||||
|
||||
public void testSearchEmptyDoc() {
|
||||
|
|