Use expectThrows() instead of try-catch blocks for testing expected exceptions
parent 3d3dd7185d
commit c63c5fa3f2
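The hunks below apply a single mechanical pattern across many test classes. As a minimal, self-contained sketch of that pattern (the class name ExpectThrowsExampleTests is hypothetical; it assumes a test extending ESTestCase, which inherits expectThrows(), fail() and the Hamcrest-style assertThat() from the Lucene/JUnit test framework, and it reuses the Version.fromString() case from the first hunk):

import org.elasticsearch.Version;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.containsString;

public class ExpectThrowsExampleTests extends ESTestCase {

    // Old idiom: run the failing call in a try block, fail() if it returns normally,
    // and assert on the exception message inside the catch block.
    public void testTooLongVersionFromStringOldStyle() {
        try {
            Version.fromString("1.0.0.1.3");
            fail("Expected IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
        }
    }

    // New idiom: expectThrows() runs the lambda, asserts that it throws the expected type,
    // and returns the caught exception so the message can still be checked.
    public void testTooLongVersionFromString() {
        Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0.0.1.3"));
        assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
    }
}

Some BytesStreamsTests hunks below also move the stream fixtures into try-with-resources blocks while being converted, so the streams are closed even when an assertion throws; that change is incidental to the expectThrows() conversion itself.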
@@ -97,40 +97,23 @@ public class VersionTests extends ESTestCase {
     }
 
     public void testTooLongVersionFromString() {
-        try {
-            Version.fromString("1.0.0.1.3");
-            fail("Expected IllegalArgumentException");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
-        }
+        Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0.0.1.3"));
+        assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
     }
 
     public void testTooShortVersionFromString() {
-        try {
-            Version.fromString("1.0");
-            fail("Expected IllegalArgumentException");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
-        }
-
+        Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("1.0"));
+        assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
     }
 
     public void testWrongVersionFromString() {
-        try {
-            Version.fromString("WRONG.VERSION");
-            fail("Expected IllegalArgumentException");
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
-        }
+        Exception e = expectThrows(IllegalArgumentException.class, () -> Version.fromString("WRONG.VERSION"));
+        assertThat(e.getMessage(), containsString("needs to contain major, minor, and revision"));
     }
 
     public void testVersionNoPresentInSettings() {
-        try {
-            Version.indexCreated(Settings.builder().build());
-            fail("Expected IllegalArgumentException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
-        }
+        Exception e = expectThrows(IllegalStateException.class, () -> Version.indexCreated(Settings.builder().build()));
+        assertThat(e.getMessage(), containsString("[index.version.created] is not present"));
     }
 
     public void testIndexCreatedVersion() {
@@ -137,6 +137,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
         THREAD_POOL = new TestThreadPool(TransportInstanceSingleOperationActionTests.class.getSimpleName());
     }
 
+    @Override
     @Before
     public void setUp() throws Exception {
         super.setUp();
@@ -156,6 +157,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase {
         );
     }
 
+    @Override
     @After
     public void tearDown() throws Exception {
         super.tearDown();
@@ -226,12 +226,8 @@ public class UpdateRequestTests extends ESTestCase {
     // Related to issue #15822
     public void testInvalidBodyThrowsParseException() throws Exception {
         UpdateRequest request = new UpdateRequest("test", "type", "1");
-        try {
-            request.fromXContent(new byte[] { (byte) '"' });
-            fail("Should have thrown a ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class, () -> request.fromXContent(new byte[] { (byte) '"' }));
+        assertThat(e.getMessage(), equalTo("Failed to derive xcontent"));
     }
 
     // Related to issue 15338
@@ -177,43 +177,31 @@ public class DateMathExpressionResolverTests extends ESTestCase {
     }
 
     public void testExpressionInvalidUnescaped() throws Exception {
-        try {
-            expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
-            assertThat(e.getMessage(), containsString("invalid character at position ["));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> expressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")));
+        assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
+        assertThat(e.getMessage(), containsString("invalid character at position ["));
     }
 
     public void testExpressionInvalidDateMathFormat() throws Exception {
-        try {
-            expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
-            assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")));
+        assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
+        assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
     }
 
     public void testExpressionInvalidEmptyDateMathFormat() throws Exception {
-        try {
-            expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
-            assertThat(e.getMessage(), containsString("missing date format"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")));
+        assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
+        assertThat(e.getMessage(), containsString("missing date format"));
    }
 
     public void testExpressionInvalidOpenEnded() throws Exception {
-        try {
-            expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
-            assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> expressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")));
+        assertThat(e.getMessage(), containsString("invalid dynamic name expression"));
+        assertThat(e.getMessage(), containsString("date math placeholder is open ended"));
     }
 
 }
@@ -28,71 +28,43 @@ import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
 
 public class TableTests extends ESTestCase {
 
     public void testFailOnStartRowWithoutHeader() {
         Table table = new Table();
-        try {
-            table.startRow();
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("no headers added..."));
-        }
+        Exception e = expectThrows(IllegalStateException.class, () -> table.startRow());
+        assertThat(e.getMessage(), is("no headers added..."));
     }
 
     public void testFailOnEndHeadersWithoutStart() {
         Table table = new Table();
-        try {
-            table.endHeaders();
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("no headers added..."));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.endHeaders());
+        assertThat(e.getMessage(), is("no headers added..."));
     }
 
     public void testFailOnAddCellWithoutHeader() {
         Table table = new Table();
-        try {
-            table.addCell("error");
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("no block started..."));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
+        assertThat(e.getMessage(), is("no block started..."));
     }
 
     public void testFailOnAddCellWithoutRow() {
         Table table = this.getTableWithHeaders();
-        try {
-            table.addCell("error");
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("no block started..."));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("error"));
+        assertThat(e.getMessage(), is("no block started..."));
     }
 
     public void testFailOnEndRowWithoutStart() {
         Table table = this.getTableWithHeaders();
-        try {
-            table.endRow();
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("no row started..."));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
+        assertThat(e.getMessage(), is("no row started..."));
     }
 
     public void testFailOnLessCellsThanDeclared() {
         Table table = this.getTableWithHeaders();
         table.startRow();
         table.addCell("foo");
-        try {
-            table.endRow(true);
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("mismatch on number of cells 1 in a row compared to header 2"));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.endRow());
+        assertThat(e.getMessage(), is("mismatch on number of cells 1 in a row compared to header 2"));
     }
 
     public void testOnLessCellsThanDeclaredUnchecked() {
@@ -107,13 +79,8 @@ public class TableTests extends ESTestCase {
         table.startRow();
         table.addCell("foo");
         table.addCell("bar");
-        try {
-            table.addCell("foobar");
-            fail("Expected IllegalStateException");
-        } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), is("can't add more cells to a row than the header"));
-        }
-
+        Exception e = expectThrows(IllegalStateException.class, () -> table.addCell("foobar"));
+        assertThat(e.getMessage(), is("can't add more cells to a row than the header"));
     }
 
     public void testSimple() {
@@ -19,12 +19,6 @@
 
 package org.elasticsearch.common.geo;
 
-import org.locationtech.spatial4j.exception.InvalidShapeException;
-import org.locationtech.spatial4j.shape.Circle;
-import org.locationtech.spatial4j.shape.Point;
-import org.locationtech.spatial4j.shape.Rectangle;
-import org.locationtech.spatial4j.shape.Shape;
-import org.locationtech.spatial4j.shape.impl.PointImpl;
 import com.vividsolutions.jts.geom.Coordinate;
 import com.vividsolutions.jts.geom.LineString;
 import com.vividsolutions.jts.geom.Polygon;
@@ -35,6 +29,12 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.geo.builders.ShapeBuilders;
 import org.elasticsearch.test.ESTestCase;
+import org.locationtech.spatial4j.exception.InvalidShapeException;
+import org.locationtech.spatial4j.shape.Circle;
+import org.locationtech.spatial4j.shape.Point;
+import org.locationtech.spatial4j.shape.Rectangle;
+import org.locationtech.spatial4j.shape.Shape;
+import org.locationtech.spatial4j.shape.impl.PointImpl;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiLineString;
 import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiPolygon;
@@ -183,17 +183,13 @@ public class ShapeBuilderTests extends ESTestCase {
     }
 
     public void testPolygonSelfIntersection() {
-        try {
-            ShapeBuilders.newPolygon(new CoordinatesBuilder()
+        PolygonBuilder newPolygon = ShapeBuilders.newPolygon(new CoordinatesBuilder()
                     .coordinate(-40.0, 50.0)
                     .coordinate(40.0, 50.0)
                     .coordinate(-40.0, -50.0)
-                    .coordinate(40.0, -50.0).close())
-                .build();
-            fail("Expected InvalidShapeException");
-        } catch (InvalidShapeException e) {
-            assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0"));
-        }
+                    .coordinate(40.0, -50.0).close());
+        Exception e = expectThrows(InvalidShapeException.class, () -> newPolygon.build());
+        assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0"));
     }
 
     public void testGeoCircle() {
|
||||||
.coordinate(179, -10)
|
.coordinate(179, -10)
|
||||||
.coordinate(164, 0)
|
.coordinate(164, 0)
|
||||||
));
|
));
|
||||||
try {
|
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
|
||||||
builder.close().build();
|
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
|
||||||
fail("Expected InvalidShapeException");
|
|
||||||
} catch (InvalidShapeException e) {
|
|
||||||
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testBoundaryShapeWithTangentialHole() {
|
public void testBoundaryShapeWithTangentialHole() {
|
||||||
|
@ -602,12 +594,8 @@ public class ShapeBuilderTests extends ESTestCase {
|
||||||
.coordinate(176, -10)
|
.coordinate(176, -10)
|
||||||
.coordinate(-177, 10)
|
.coordinate(-177, 10)
|
||||||
));
|
));
|
||||||
try {
|
Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
|
||||||
builder.close().build();
|
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
|
||||||
fail("Expected InvalidShapeException");
|
|
||||||
} catch (InvalidShapeException e) {
|
|
||||||
assertThat(e.getMessage(), containsString("interior cannot share more than one point with the exterior"));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -659,11 +647,7 @@ public class ShapeBuilderTests extends ESTestCase {
                 .coordinate(-176, 4)
                 .coordinate(180, 0)
                 );
-        try {
-            builder.close().build();
-            fail("Expected InvalidShapeException");
-        } catch (InvalidShapeException e) {
-            assertThat(e.getMessage(), containsString("duplicate consecutive coordinates at: ("));
-        }
+        Exception e = expectThrows(InvalidShapeException.class, () -> builder.close().build());
+        assertThat(e.getMessage(), containsString("duplicate consecutive coordinates at: ("));
     }
 }
@@ -97,14 +97,7 @@ public class BytesStreamsTests extends ESTestCase {
         BytesStreamOutput out = new BytesStreamOutput();
 
         // bulk-write with wrong args
-        try {
-            out.writeBytes(new byte[]{}, 0, 1);
-            fail("expected IllegalArgumentException: length > (size-offset)");
-        }
-        catch (IllegalArgumentException iax1) {
-            // expected
-        }
-
+        expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[]{}, 0, 1));
         out.close();
     }
 
@@ -333,18 +326,21 @@ public class BytesStreamsTests extends ESTestCase {
     }
 
     public void testNamedWriteable() throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
-            new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)
-        ));
-        TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
+                new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new)));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
+                randomAsciiOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
-        assertEquals(in.available(), bytes.length);
-        BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
-        assertEquals(namedWriteableIn, namedWriteableOut);
-        assertEquals(0, in.available());
+            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
+                assertEquals(in.available(), bytes.length);
+                BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class);
+                assertEquals(namedWriteableIn, namedWriteableOut);
+                assertEquals(0, in.available());
+            }
+        }
     }
 
     public void testNamedWriteableList() throws IOException {
@@ -367,59 +363,61 @@ public class BytesStreamsTests extends ESTestCase {
     }
 
     public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2");
             out.writeNamedWriteable(testNamedWriteable);
             StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
-        try {
-            in.readNamedWriteable(BaseNamedWriteable.class);
-            fail("Expected UnsupportedOperationException");
-        } catch (UnsupportedOperationException e) {
+            Exception e = expectThrows(UnsupportedOperationException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
             assertThat(e.getMessage(), is("can't read named writeable from StreamInput"));
         }
     }
 
     public void testNamedWriteableReaderReturnsNull() throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
-            new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)
-        ));
-        TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
+                new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null)));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
+                randomAsciiOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
+            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
                 assertEquals(in.available(), bytes.length);
                 IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
                 assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
+            }
+        }
     }
 
     public void testOptionalWriteableReaderReturnsNull() throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)));
             StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes()));
             IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null));
             assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream."));
+        }
     }
 
     public void testWriteableReaderReturnsWrongName() throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
-        NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.singletonList(
-            new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) ->
-                new TestNamedWriteable(in) {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
+                Collections.singletonList(new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME,
+                    (StreamInput in) -> new TestNamedWriteable(in) {
                         @Override
                         public String getWriteableName() {
                             return "intentionally-broken";
                         }
-                })
-        ));
-        TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10));
+                    })));
+            TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10),
+                randomAsciiOfLengthBetween(1, 10));
             out.writeNamedWriteable(namedWriteableIn);
             byte[] bytes = BytesReference.toBytes(out.bytes());
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry);
+            try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry)) {
                 assertEquals(in.available(), bytes.length);
                 AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class));
                 assertThat(e.getMessage(),
                     endsWith(" claims to have a different name [intentionally-broken] than it was read from [test-named-writeable]."));
+            }
+        }
     }
 
     public void testWriteStreamableList() throws IOException {
@@ -551,32 +549,13 @@ public class BytesStreamsTests extends ESTestCase {
         assertEquals(-1, out.position());
 
         // writing a single byte must fail
-        try {
-            out.writeByte((byte)0);
-            fail("expected IllegalStateException: stream closed");
-        }
-        catch (IllegalStateException iex1) {
-            // expected
-        }
+        expectThrows(IllegalArgumentException.class, () -> out.writeByte((byte)0));
 
         // writing in bulk must fail
-        try {
-            out.writeBytes(new byte[0], 0, 0);
-            fail("expected IllegalStateException: stream closed");
-        }
-        catch (IllegalStateException iex1) {
-            // expected
-        }
+        expectThrows(IllegalArgumentException.class, () -> out.writeBytes(new byte[0], 0, 0));
 
         // toByteArray() must fail
-        try {
-            BytesReference.toBytes(out.bytes());
-            fail("expected IllegalStateException: stream closed");
-        }
-        catch (IllegalStateException iex1) {
-            // expected
-        }
-
+        expectThrows(IllegalArgumentException.class, () -> BytesReference.toBytes(out.bytes()));
     }
 
     // create & fill byte[] with randomized data
@@ -587,16 +566,15 @@ public class BytesStreamsTests extends ESTestCase {
     }
 
     public void testReadWriteGeoPoint() throws IOException {
-        {
-            BytesStreamOutput out = new BytesStreamOutput();
+        try (BytesStreamOutput out = new BytesStreamOutput()) {;
             GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
             out.writeGenericValue(geoPoint);
             StreamInput wrap = out.bytes().streamInput();
             GeoPoint point = (GeoPoint) wrap.readGenericValue();
             assertEquals(point, geoPoint);
         }
-        {
-            BytesStreamOutput out = new BytesStreamOutput();
+
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
             GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble());
             out.writeGeoPoint(geoPoint);
             StreamInput wrap = out.bytes().streamInput();
@@ -640,12 +618,12 @@ public class BytesStreamsTests extends ESTestCase {
 
         assertNotEquals(mapKeys, reverseMapKeys);
 
-        BytesStreamOutput output = new BytesStreamOutput();
-        BytesStreamOutput reverseMapOutput = new BytesStreamOutput();
-        output.writeMapWithConsistentOrder(map);
-        reverseMapOutput.writeMapWithConsistentOrder(reverseMap);
+        try (BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput reverseMapOutput = new BytesStreamOutput()) {
+            output.writeMapWithConsistentOrder(map);
+            reverseMapOutput.writeMapWithConsistentOrder(reverseMap);
 
             assertEquals(output.bytes(), reverseMapOutput.bytes());
+        }
     }
 
     public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException {
@@ -653,18 +631,20 @@ public class BytesStreamsTests extends ESTestCase {
             randomMap(new HashMap<>(), randomIntBetween(2, 20),
                 () -> randomAsciiOfLength(5),
                 () -> randomAsciiOfLength(5));
-        BytesStreamOutput streamOut = new BytesStreamOutput();
+        try (BytesStreamOutput streamOut = new BytesStreamOutput()) {
             streamOut.writeMapWithConsistentOrder(streamOutMap);
             StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes()));
             Map<String, Object> streamInMap = in.readMap();
             assertEquals(streamOutMap, streamInMap);
+        }
     }
 
     public void testWriteMapWithConsistentOrderWithLinkedHashMapShouldThrowAssertError() throws IOException {
-        BytesStreamOutput output = new BytesStreamOutput();
+        try (BytesStreamOutput output = new BytesStreamOutput()) {
             Map<String, Object> map = new LinkedHashMap<>();
             Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map));
             assertEquals(AssertionError.class, e.getClass());
+        }
     }
 
     private static <K, V> Map<K, V> randomMap(Map<K, V> map, int size, Supplier<K> keyGenerator, Supplier<V> valueGenerator) {
@@ -123,48 +123,30 @@ public class ByteSizeValueTests extends ESTestCase {
     }
 
     public void testFailOnMissingUnits() {
-        try {
-            ByteSizeValue.parseBytesSizeValue("23", "test");
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23", "test"));
+        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
     }
 
     public void testFailOnUnknownUnits() {
-        try {
-            ByteSizeValue.parseBytesSizeValue("23jw", "test");
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("23jw", "test"));
+        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
     }
 
     public void testFailOnEmptyParsing() {
-        try {
-            assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> assertThat(ByteSizeValue.parseBytesSizeValue("", "emptyParsing").toString(), is("23kb")));
+        assertThat(e.getMessage(), containsString("failed to parse setting [emptyParsing]"));
     }
 
     public void testFailOnEmptyNumberParsing() {
-        try {
-            assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b"));
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("failed to parse [g]"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class,
+                () -> assertThat(ByteSizeValue.parseBytesSizeValue("g", "emptyNumberParsing").toString(), is("23b")));
+        assertThat(e.getMessage(), containsString("failed to parse [g]"));
     }
 
     public void testNoDotsAllowed() {
-        try {
-            ByteSizeValue.parseBytesSizeValue("42b.", null, "test");
-            fail("Expected ElasticsearchParseException");
-        } catch (ElasticsearchParseException e) {
-            assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
-        }
+        Exception e = expectThrows(ElasticsearchParseException.class, () -> ByteSizeValue.parseBytesSizeValue("42b.", null, "test"));
+        assertThat(e.getMessage(), containsString("failed to parse setting [test]"));
     }
 
     public void testCompareEquality() {
@@ -87,7 +87,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase {
 
         currentNodes = DiscoveryNodes.builder();
         currentNodes.masterNodeId("b").add(new DiscoveryNode("b", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT));
-        ;
         // version isn't taken into account, so randomize it to ensure this.
         if (randomBoolean()) {
             currentState.version(2);
@@ -41,7 +41,6 @@ import org.hamcrest.CoreMatchers;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.lang.NumberFormatException;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@@ -256,85 +255,60 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .bytes());
 
-        try {
+        expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", -91).field("lon", 1.3).endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-
-        }
+                    .bytes()));
 
-        try {
+        expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", 91).field("lon", 1.3).endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-
-        }
+                    .bytes()));
 
-        try {
+        expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", 1.2).field("lon", -181).endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-
-        }
+                    .bytes()));
 
-        try {
+        expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", 1.2).field("lon", 181).endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-
-        }
+                    .bytes()));
 
-        try {
+        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", "-").field("lon", 1.3).endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-            assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-            assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-        }
+                    .bytes()));
+        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
+        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
 
-        try {
+        e = expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", 1.2).field("lon", "-").endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-            assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-            assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-        }
+                    .bytes()));
+        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
+        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
 
-        try {
+        e = expectThrows(MapperParsingException.class, () ->
             defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("point").field("lat", "-").field("lon", "-").endObject()
                     .endObject()
-                    .bytes());
-            fail();
-        } catch (MapperParsingException e) {
-            assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
-            assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
-        }
+                    .bytes()));
+        assertThat(e.getRootCause(), instanceOf(NumberFormatException.class));
+        assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\""));
     }
 
     public void testNoValidateLegacyLatLonValues() throws Exception {
@@ -743,92 +717,84 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         }
 
         if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
-            try {
-                String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                    .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
-                    .endObject().endObject().string();
-                parser.parse("type", new CompressedXContent(normalizeMapping));
-            } catch (MapperParsingException e) {
-                assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
-            }
+            String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+                .startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject().endObject().endObject()
+                .string();
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(normalizeMapping)));
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]");
         }
 
-        try {
-            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point");
+        {
+            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+                .startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String validateMapping = xContentBuilder.field("validate", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(validateMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(validateMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
         }
 
-        try {
+        {
             XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(validateMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(validateMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
         }
 
-        try {
+        {
             XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(validateMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(validateMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
         }
 
         // test deprecated normalize
-        try {
+        {
             XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(normalizeMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(normalizeMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
         }
 
-        try {
+        {
             XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(normalizeMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(normalizeMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
         }
 
-        try {
+        {
             XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point");
             if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) {
                 xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true);
             }
             String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string();
-            parser.parse("type", new CompressedXContent(normalizeMapping));
-            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
-        } catch (MapperParsingException e) {
+            Exception e = expectThrows(MapperParsingException.class, () ->
+                parser.parse("type", new CompressedXContent(normalizeMapping)));
             assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
         }
     }
@@ -844,20 +810,17 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
             .field("geohash", false).endObject().endObject().endObject().endObject().string();
-        try {
-            mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
-            fail();
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
-            assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
-            assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
-        }
+        Exception e = expectThrows(IllegalArgumentException.class, () ->
+            mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false));
+        assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]"));
+        assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]"));
+        assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]"));
 
         // correct mapping and ensure no failures
-        stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+        String stage2MappingCorrect = XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
             .field("geohash", true).endObject().endObject().endObject().endObject().string();
-        mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false);
+        mapperService.merge("type", new CompressedXContent(stage2MappingCorrect), MapperService.MergeReason.MAPPING_UPDATE, false);
     }
 
     public void testLegacyGeoHashSearch() throws Exception {
@@ -22,13 +22,8 @@ package org.elasticsearch.index.mapper;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.TypeFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;

 import java.io.IOException;
@@ -366,22 +361,16 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
 createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);

 // explicitly setting limit to 0 prevents nested fields
-try {
+Exception e = expectThrows(IllegalArgumentException.class, () ->
 createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
-.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
-fail("Expected IllegalArgumentException");
-} catch (IllegalArgumentException e) {
-assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));
-}
+.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
+assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded"));

 // setting limit to 1 with 2 nested fields fails
-try {
+e = expectThrows(IllegalArgumentException.class, () ->
 createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build())
-.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false);
-fail("Expected IllegalArgumentException");
-} catch (IllegalArgumentException e) {
-assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));
-}
+.mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false));
+assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded"));

 MapperService mapperService = createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 2)
 .build()).mapperService();
@@ -391,12 +380,9 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
 // adding new fields from different type is not ok
 String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3")
 .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().endObject().string();
-try {
-mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false);
-fail("Expected IllegalArgumentException");
-} catch (IllegalArgumentException e) {
-assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded"));
-}
+e = expectThrows(IllegalArgumentException.class, () ->
+    mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false));
+assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded"));

 // do not check nested fields limit if mapping is not updated
 createIndex("test5", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build())
@@ -30,20 +30,11 @@ import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.joda.Joda;
-import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.DocumentMapper;
-import org.elasticsearch.index.mapper.DocumentMapperParser;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.ParsedDocument;
-import org.elasticsearch.index.mapper.SourceToParse;
-import org.elasticsearch.index.mapper.TimestampFieldMapper;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -57,7 +48,6 @@ import java.util.Collection;
 import java.util.LinkedHashMap;

 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
-import static org.elasticsearch.test.VersionUtils.randomVersion;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
@@ -212,12 +202,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
 .field("default", (String) null)
 .endObject()
 .endObject().endObject();
-try {
-createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
-} catch (TimestampParsingException e) {
-assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
-}
+TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
+    .documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
+assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
 }

 // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -229,12 +216,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()
 .endObject().endObject();

-try {
-createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set to null");
-} catch (TimestampParsingException e) {
-assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
-}
+TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
+    .documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
+assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null"));
 }

 // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -247,12 +231,9 @@ public class TimestampFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()
 .endObject().endObject();

-try {
-createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string()));
-fail("we should reject the mapping with a TimestampParsingException: default timestamp can not be set with ignore_missing set to false");
-} catch (TimestampParsingException e) {
-assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
-}
+TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService()
+    .documentMapperParser().parse("type", new CompressedXContent(mapping.string())));
+assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false"));
 }

 // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null]
@@ -30,8 +30,8 @@ import org.elasticsearch.test.geo.RandomGeoGenerator;

 import java.io.IOException;

-import static org.hamcrest.Matchers.is;
 import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
+import static org.hamcrest.Matchers.is;

 public class GeoPointParsingTests extends ESTestCase {
 static double TOLERANCE = 1E-5;
@@ -112,13 +112,8 @@ public class GeoPointParsingTests extends ESTestCase {

 XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
 }

 public void testInvalidPointLatHashMix() throws IOException {
@@ -130,12 +125,8 @@ public class GeoPointParsingTests extends ESTestCase {
 XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
 parser.nextToken();

-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
 }

 public void testInvalidPointLonHashMix() throws IOException {
@@ -147,12 +138,8 @@ public class GeoPointParsingTests extends ESTestCase {
 XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
 parser.nextToken();

-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field must be either lat/lon or geohash"));
 }

 public void testInvalidField() throws IOException {
@@ -164,12 +151,8 @@ public class GeoPointParsingTests extends ESTestCase {
 XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
 parser.nextToken();

-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
 }

 private static XContentParser objectLatLon(double lat, double lon) throws IOException {
@@ -19,8 +19,6 @@

 package org.elasticsearch.index.search.geo;

-import org.locationtech.spatial4j.context.SpatialContext;
-import org.locationtech.spatial4j.distance.DistanceUtils;
 import org.apache.lucene.spatial.prefix.tree.Cell;
 import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
 import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
@@ -33,6 +31,8 @@ import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.test.ESTestCase;
+import org.locationtech.spatial4j.context.SpatialContext;
+import org.locationtech.spatial4j.distance.DistanceUtils;

 import java.io.IOException;

@@ -439,12 +439,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("geohash", 1.0).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), containsString("geohash must be a string"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), containsString("geohash must be a string"));
 }

 public void testParseGeoPointLatNoLon() throws IOException {
@@ -452,12 +448,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field [lon] missing"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field [lon] missing"));
 }

 public void testParseGeoPointLonNoLat() throws IOException {
@@ -465,12 +457,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("lon", lon).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field [lat] missing"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field [lat] missing"));
 }

 public void testParseGeoPointLonWrongType() throws IOException {
@@ -478,12 +466,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("longitude must be a number"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("longitude must be a number"));
 }

 public void testParseGeoPointLatWrongType() throws IOException {
@@ -491,12 +475,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("latitude must be a number"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("latitude must be a number"));
 }

 public void testParseGeoPointExtraField() throws IOException {
@@ -505,12 +485,8 @@ public class GeoUtilsTests extends ESTestCase {
 BytesReference jsonBytes = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject().bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]"));
 }

 public void testParseGeoPointLonLatGeoHash() throws IOException {
@@ -521,12 +497,8 @@ public class GeoUtilsTests extends ESTestCase {
 .bytes();
 XContentParser parser = XContentHelper.createParser(jsonBytes);
 parser.nextToken();
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash"));
 }

 public void testParseGeoPointArrayTooManyValues() throws IOException {
@@ -539,12 +511,8 @@ public class GeoUtilsTests extends ESTestCase {
 while (parser.currentToken() != Token.START_ARRAY) {
 parser.nextToken();
 }
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("only two values allowed"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("only two values allowed"));
 }

 public void testParseGeoPointArrayWrongType() throws IOException {
@@ -555,12 +523,8 @@ public class GeoUtilsTests extends ESTestCase {
 while (parser.currentToken() != Token.START_ARRAY) {
 parser.nextToken();
 }
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("numeric value expected"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("numeric value expected"));
 }

 public void testParseGeoPointInvalidType() throws IOException {
@@ -569,12 +533,8 @@ public class GeoUtilsTests extends ESTestCase {
 while (parser.currentToken() != Token.VALUE_NUMBER) {
 parser.nextToken();
 }
-try {
-GeoUtils.parseGeoPoint(parser);
-fail("Expected ElasticsearchParseException");
-} catch (ElasticsearchParseException e) {
-assertThat(e.getMessage(), is("geo_point expected"));
-}
+Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser));
+assertThat(e.getMessage(), is("geo_point expected"));
 }

 public void testPrefixTreeCellSizes() {
@@ -470,8 +470,6 @@ public class IndexShardTests extends IndexShardTestCase {
 throw new RuntimeException(ex);
 }
 }
-
-;
 };
 thread[i].start();
 }
@@ -1172,6 +1170,7 @@ public class IndexShardTests extends IndexShardTestCase {
 throw new RuntimeException("boom");
 }

+@Override
 public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
 return searcher;
 }
@@ -64,9 +64,8 @@ public class ShardPathTests extends ESTestCase {
 assumeTrue("This test tests multi data.path but we only got one", paths.length > 1);
 int id = randomIntBetween(1, 10);
 ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), paths);
-ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
-fail("Expected IllegalStateException");
-} catch (IllegalStateException e) {
+Exception e = expectThrows(IllegalStateException.class, () ->
+    ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
 assertThat(e.getMessage(), containsString("more than one shard state found"));
 }
 }
@@ -81,9 +80,8 @@ public class ShardPathTests extends ESTestCase {
 Path path = randomFrom(paths);
 int id = randomIntBetween(1, 10);
 ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), path);
-ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings));
-fail("Expected IllegalStateException");
-} catch (IllegalStateException e) {
+Exception e = expectThrows(IllegalStateException.class, () ->
+    ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)));
 assertThat(e.getMessage(), containsString("expected: foobar on shard path"));
 }
 }
@@ -91,12 +89,8 @@ public class ShardPathTests extends ESTestCase {
 public void testIllegalCustomDataPath() {
 Index index = new Index("foo", "foo");
 final Path path = createTempDir().resolve(index.getUUID()).resolve("0");
-try {
-new ShardPath(true, path, path, new ShardId(index, 0));
-fail("Expected IllegalArgumentException");
-} catch (IllegalArgumentException e) {
-assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
-}
+Exception e = expectThrows(IllegalArgumentException.class, () -> new ShardPath(true, path, path, new ShardId(index, 0)));
+assertThat(e.getMessage(), is("shard state path must be different to the data path when using custom data paths"));
 }

 public void testValidCtor() {
@@ -45,7 +45,6 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -444,21 +443,15 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {

 public void testAllMissingStrict() throws Exception {
 createIndex("test1");
-try {
+expectThrows(IndexNotFoundException.class, () ->
 client().prepareSearch("test2")
 .setQuery(matchAllQuery())
-.execute().actionGet();
-fail("Exception should have been thrown.");
-} catch (IndexNotFoundException e) {
-}
+.execute().actionGet());

-try {
+expectThrows(IndexNotFoundException.class, () ->
 client().prepareSearch("test2","test3")
 .setQuery(matchAllQuery())
-.execute().actionGet();
-fail("Exception should have been thrown.");
-} catch (IndexNotFoundException e) {
-}
+.execute().actionGet());

 //you should still be able to run empty searches without things blowing up
 client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
@@ -70,22 +70,16 @@ public class OpenCloseIndexIT extends ESIntegTestCase {

 public void testSimpleCloseMissingIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareClose("test1").execute().actionGet();
-fail("Expected IndexNotFoundException");
-} catch (IndexNotFoundException e) {
-assertThat(e.getMessage(), is("no such index"));
-}
+Exception e = expectThrows(IndexNotFoundException.class, () ->
+    client.admin().indices().prepareClose("test1").execute().actionGet());
+assertThat(e.getMessage(), is("no such index"));
 }

 public void testSimpleOpenMissingIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareOpen("test1").execute().actionGet();
-fail("Expected IndexNotFoundException");
-} catch (IndexNotFoundException e) {
-assertThat(e.getMessage(), is("no such index"));
-}
+Exception e = expectThrows(IndexNotFoundException.class, () ->
+    client.admin().indices().prepareOpen("test1").execute().actionGet());
+assertThat(e.getMessage(), is("no such index"));
 }

 public void testCloseOneMissingIndex() {
@@ -93,12 +87,9 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
 createIndex("test1");
 ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
 assertThat(healthResponse.isTimedOut(), equalTo(false));
-try {
-client.admin().indices().prepareClose("test1", "test2").execute().actionGet();
-fail("Expected IndexNotFoundException");
-} catch (IndexNotFoundException e) {
-assertThat(e.getMessage(), is("no such index"));
-}
+Exception e = expectThrows(IndexNotFoundException.class, () ->
+    client.admin().indices().prepareClose("test1", "test2").execute().actionGet());
+assertThat(e.getMessage(), is("no such index"));
 }

 public void testCloseOneMissingIndexIgnoreMissing() {
@@ -117,12 +108,9 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
 createIndex("test1");
 ClusterHealthResponse healthResponse = client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
 assertThat(healthResponse.isTimedOut(), equalTo(false));
-try {
-client.admin().indices().prepareOpen("test1", "test2").execute().actionGet();
-fail("Expected IndexNotFoundException");
-} catch (IndexNotFoundException e) {
-assertThat(e.getMessage(), is("no such index"));
-}
+Exception e = expectThrows(IndexNotFoundException.class, () ->
+    client.admin().indices().prepareOpen("test1", "test2").execute().actionGet());
+assertThat(e.getMessage(), is("no such index"));
 }

 public void testOpenOneMissingIndexIgnoreMissing() {
@@ -204,42 +192,30 @@ public class OpenCloseIndexIT extends ESIntegTestCase {

 public void testCloseNoIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareClose().execute().actionGet();
-fail("Expected ActionRequestValidationException");
-} catch (ActionRequestValidationException e) {
-assertThat(e.getMessage(), containsString("index is missing"));
-}
+Exception e = expectThrows(ActionRequestValidationException.class, () ->
+    client.admin().indices().prepareClose().execute().actionGet());
+assertThat(e.getMessage(), containsString("index is missing"));
 }

 public void testCloseNullIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareClose((String[])null).execute().actionGet();
-fail("Expected ActionRequestValidationException");
-} catch (ActionRequestValidationException e) {
-assertThat(e.getMessage(), containsString("index is missing"));
-}
+Exception e = expectThrows(ActionRequestValidationException.class, () ->
+    client.admin().indices().prepareClose((String[])null).execute().actionGet());
+assertThat(e.getMessage(), containsString("index is missing"));
 }

 public void testOpenNoIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareOpen().execute().actionGet();
-fail("Expected ActionRequestValidationException");
-} catch (ActionRequestValidationException e) {
-assertThat(e.getMessage(), containsString("index is missing"));
-}
+Exception e = expectThrows(ActionRequestValidationException.class, () ->
+    client.admin().indices().prepareOpen().execute().actionGet());
+assertThat(e.getMessage(), containsString("index is missing"));
 }

 public void testOpenNullIndex() {
 Client client = client();
-try {
-client.admin().indices().prepareOpen((String[])null).execute().actionGet();
-fail("Expected ActionRequestValidationException");
-} catch (ActionRequestValidationException e) {
-assertThat(e.getMessage(), containsString("index is missing"));
-}
+Exception e = expectThrows(ActionRequestValidationException.class, () ->
+    client.admin().indices().prepareOpen((String[])null).execute().actionGet());
+assertThat(e.getMessage(), containsString("index is missing"));
 }

 public void testOpenAlreadyOpenedIndex() {
@@ -535,13 +535,10 @@ public class SearchQueryIT extends ESIntegTestCase {
 searchResponse = client().prepareSearch().setQuery(queryStringQuery("future:[now/d TO now+2M/d]").lowercaseExpandedTerms(false)).get();
 assertHitCount(searchResponse, 1L);

-try {
-client().prepareSearch().setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get();
-fail("expected SearchPhaseExecutionException (total failure)");
-} catch (SearchPhaseExecutionException e) {
-assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
-assertThat(e.toString(), containsString("unit [D] not supported for date math"));
-}
+SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch()
+    .setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lowercaseExpandedTerms(false)).get());
+assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
+assertThat(e.toString(), containsString("unit [D] not supported for date math"));
 }

 // Issue #7880
@@ -776,12 +773,7 @@ public class SearchQueryIT extends ESIntegTestCase {
 searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get();
 assertHitCount(searchResponse, 1L);
 assertFirstHit(searchResponse, hasId("2"));
-try {
-client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get();
-fail("SearchPhaseExecutionException should have been thrown");
-} catch (SearchPhaseExecutionException ex) {
-// number format exception
-}
+expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get());
 }

 public void testMultiMatchQuery() throws Exception {
@@ -1777,15 +1769,11 @@ public class SearchQueryIT extends ESIntegTestCase {
 refresh();

 //has_child fails if executed on "simple" index
-try {
-client().prepareSearch("simple")
-.setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
-fail("Should have failed as has_child query can only be executed against parent-child types");
-} catch (SearchPhaseExecutionException e) {
-assertThat(e.shardFailures().length, greaterThan(0));
-for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
-assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]"));
-}
+SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
+    () -> client().prepareSearch("simple").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get());
+assertThat(e.shardFailures().length, greaterThan(0));
+for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
+assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]"));
 }

 //has_child doesn't get parsed for "simple" index
@@ -1983,14 +1971,10 @@ public class SearchQueryIT extends ESIntegTestCase {
 assertThat(searchResponse.getHits().getAt(0).getId(), is("3"));

 // When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation
-try {
+Exception e = expectThrows(SearchPhaseExecutionException.class, () ->
 client().prepareSearch("test")
 .setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00"))
-.get();
-fail("A Range Filter using ms since epoch with a TimeZone should raise a ParsingException");
-} catch (SearchPhaseExecutionException e) {
-// We expect it
-}
+.get());

 searchResponse = client().prepareSearch("test")
 .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00"))
@@ -2005,14 +1989,10 @@ public class SearchQueryIT extends ESIntegTestCase {
 assertThat(searchResponse.getHits().getAt(0).getId(), is("4"));

 // A Range Filter on a numeric field with a TimeZone should raise an exception
-try {
+e = expectThrows(SearchPhaseExecutionException.class, () ->
 client().prepareSearch("test")
 .setQuery(QueryBuilders.rangeQuery("num").from("0").to("4").timeZone("-01:00"))
-.get();
-fail("A Range Filter on a numeric field with a TimeZone should raise a ParsingException");
-} catch (SearchPhaseExecutionException e) {
-// We expect it
-}
+.get());
 }

 public void testSearchEmptyDoc() {