Merge remote-tracking branch 'origin/branch_6x' into branch_6x

Noble Paul 2016-04-19 13:11:38 +05:30
commit 3eb1ce6cf6
18 changed files with 394 additions and 173 deletions

@ -54,7 +54,7 @@ Optimizations
multiple polygons and holes, with memory usage independent of
polygon complexity. (Karl Wright, Mike McCandless, Robert Muir)
* LUCENE-7159, LUCENE-7222: Speed up LatLonPoint polygon performance for complex
* LUCENE-7159, LUCENE-7222, LUCENE-7229: Speed up LatLonPoint polygon performance for complex
polygons. (Robert Muir)
* LUCENE-7211: Reduce memory & GC for spatial RPT Intersects when the number of

@ -48,9 +48,6 @@ public final class Polygon {
/** maximum longitude of this polygon's bounding box area */
public final double maxLon;
// TODO: refactor to GeoUtils once LUCENE-7165 is complete
private static final double ENCODING_TOLERANCE = 1e-6;
// TODO: we could also compute the maximal inner bounding box, to make relations faster to compute?
/**
@ -234,70 +231,79 @@ public final class Polygon {
return containsCount;
}
private boolean crossesSlowly(double minLat, double maxLat, final double minLon, final double maxLon) {
/*
* Accurately compute (within restrictions of cartesian decimal degrees) whether a rectangle crosses a polygon
*/
final double[] boxLats = new double[] { minLat, minLat, maxLat, maxLat, minLat };
final double[] boxLons = new double[] { minLon, maxLon, maxLon, minLon, minLon };
/** Returns true if the box crosses our polygon */
private boolean crossesSlowly(double minLat, double maxLat, double minLon, double maxLon) {
// we compute line intersections of every polygon edge with every box line.
// if we find one, return true.
// for each box line (AB):
// for each poly line (CD):
// intersects = orient(C,D,A) * orient(C,D,B) <= 0 && orient(A,B,C) * orient(A,B,D) <= 0
for (int i = 1; i < polyLons.length; i++) {
double cy = polyLats[i - 1];
double dy = polyLats[i];
double cx = polyLons[i - 1];
double dx = polyLons[i];
// computes the intersection point between each bbox edge and the polygon edge
for (int b=0; b<4; ++b) {
double a1 = boxLats[b+1]-boxLats[b];
double b1 = boxLons[b]-boxLons[b+1];
double c1 = a1*boxLons[b+1] + b1*boxLats[b+1];
for (int p=0; p<polyLons.length-1; ++p) {
double a2 = polyLats[p+1]-polyLats[p];
double b2 = polyLons[p]-polyLons[p+1];
// compute determinant
double d = a1*b2 - a2*b1;
if (d != 0) {
// lines are not parallel, check intersecting points
double c2 = a2*polyLons[p+1] + b2*polyLats[p+1];
double s = (1/d)*(b2*c1 - b1*c2);
// todo TOLERANCE SHOULD MATCH EVERYWHERE this is currently blocked by LUCENE-7165
double x00 = Math.min(boxLons[b], boxLons[b+1]) - ENCODING_TOLERANCE;
if (x00 > s) {
continue; // out of range
}
double x01 = Math.max(boxLons[b], boxLons[b+1]) + ENCODING_TOLERANCE;
if (x01 < s) {
continue; // out of range
}
double x10 = Math.min(polyLons[p], polyLons[p+1]) - ENCODING_TOLERANCE;
if (x10 > s) {
continue; // out of range
}
double x11 = Math.max(polyLons[p], polyLons[p+1]) + ENCODING_TOLERANCE;
if (x11 < s) {
continue; // out of range
}
// optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
// if not, don't waste our time trying more complicated stuff
if ((cy < minLat && dy < minLat) ||
(cy > maxLat && dy > maxLat) ||
(cx < minLon && dx < minLon) ||
(cx > maxLon && dx > maxLon)) {
continue;
}
double t = (1/d)*(a1*c2 - a2*c1);
double y00 = Math.min(boxLats[b], boxLats[b+1]) - ENCODING_TOLERANCE;
if (y00 > t || (x00 == s && y00 == t)) {
continue; // out of range or touching
}
double y01 = Math.max(boxLats[b], boxLats[b+1]) + ENCODING_TOLERANCE;
if (y01 < t || (x01 == s && y01 == t)) {
continue; // out of range or touching
}
double y10 = Math.min(polyLats[p], polyLats[p+1]) - ENCODING_TOLERANCE;
if (y10 > t || (x10 == s && y10 == t)) {
continue; // out of range or touching
}
double y11 = Math.max(polyLats[p], polyLats[p+1]) + ENCODING_TOLERANCE;
if (y11 < t || (x11 == s && y11 == t)) {
continue; // out of range or touching
}
// if line segments are not touching and the intersection point is within the range of either segment
return true;
}
} // for each poly edge
} // for each bbox edge
// does box's top edge intersect polyline?
// ax = minLon, bx = maxLon, ay = maxLat, by = maxLat
if (orient(cx, cy, dx, dy, minLon, maxLat) * orient(cx, cy, dx, dy, maxLon, maxLat) <= 0 &&
orient(minLon, maxLat, maxLon, maxLat, cx, cy) * orient(minLon, maxLat, maxLon, maxLat, dx, dy) <= 0) {
return true;
}
// does box's right edge intersect polyline?
// ax = maxLon, bx = maxLon, ay = maxLat, by = minLat
if (orient(cx, cy, dx, dy, maxLon, maxLat) * orient(cx, cy, dx, dy, maxLon, minLat) <= 0 &&
orient(maxLon, maxLat, maxLon, minLat, cx, cy) * orient(maxLon, maxLat, maxLon, minLat, dx, dy) <= 0) {
return true;
}
// does box's bottom edge intersect polyline?
// ax = maxLon, bx = minLon, ay = minLat, by = minLat
if (orient(cx, cy, dx, dy, maxLon, minLat) * orient(cx, cy, dx, dy, minLon, minLat) <= 0 &&
orient(maxLon, minLat, minLon, minLat, cx, cy) * orient(maxLon, minLat, minLon, minLat, dx, dy) <= 0) {
return true;
}
// does box's left edge intersect polyline?
// ax = minLon, bx = minLon, ay = minLat, by = maxLat
if (orient(cx, cy, dx, dy, minLon, minLat) * orient(cx, cy, dx, dy, minLon, maxLat) <= 0 &&
orient(minLon, minLat, minLon, maxLat, cx, cy) * orient(minLon, minLat, minLon, maxLat, dx, dy) <= 0) {
return true;
}
}
return false;
}
/**
* Returns a positive value if points a, b, and c are arranged in counter-clockwise order,
* negative value if clockwise, zero if collinear.
*/
// see the "Orient2D" method described here:
// http://www.cs.berkeley.edu/~jrs/meshpapers/robnotes.pdf
// https://www.cs.cmu.edu/~quake/robust.html
// Note that this one does not yet have the floating point tricks to be exact!
private static int orient(double ax, double ay, double bx, double by, double cx, double cy) {
double v1 = (bx - ax) * (cy - ay);
double v2 = (cx - ax) * (by - ay);
if (v1 > v2) {
return 1;
} else if (v1 < v2) {
return -1;
} else {
return 0;
}
}
/** Returns a copy of the internal latitude array */
public double[] getPolyLats() {
return polyLats.clone();
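The new crossesSlowly() above replaces the old line-equation solver with the four orientation sign checks described in its comment. As a minimal standalone sketch of that predicate (not part of this patch; the class and the helper name segmentsCross are made up for illustration), two segments AB and CD cross or touch exactly when each segment straddles the line through the other:

// Sketch only: orientation-based segment crossing, mirroring the comment in crossesSlowly().
public class SegmentCrossSketch {

  // Positive if a->b->c turns counter-clockwise, negative if clockwise, zero if collinear.
  static int orient(double ax, double ay, double bx, double by, double cx, double cy) {
    double v1 = (bx - ax) * (cy - ay);
    double v2 = (cx - ax) * (by - ay);
    if (v1 > v2) {
      return 1;
    } else if (v1 < v2) {
      return -1;
    } else {
      return 0;
    }
  }

  // intersects = orient(C,D,A) * orient(C,D,B) <= 0 && orient(A,B,C) * orient(A,B,D) <= 0
  static boolean segmentsCross(double ax, double ay, double bx, double by,
                               double cx, double cy, double dx, double dy) {
    return orient(cx, cy, dx, dy, ax, ay) * orient(cx, cy, dx, dy, bx, by) <= 0
        && orient(ax, ay, bx, by, cx, cy) * orient(ax, ay, bx, by, dx, dy) <= 0;
  }

  public static void main(String[] args) {
    // A box edge (0,0)-(1,0) against a polygon edge (0.5,-1)-(0.5,1): they cross.
    System.out.println(segmentsCross(0, 0, 1, 0, 0.5, -1, 0.5, 1)); // true
    // Two parallel horizontal segments: no crossing.
    System.out.println(segmentsCross(0, 0, 1, 0, 0, 1, 1, 1)); // false
  }
}

Because only the signs of the cross products matter, no epsilon is needed, which is consistent with the patch also dropping the ENCODING_TOLERANCE constant used by the removed code.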

@ -37,7 +37,7 @@ public class SloppyMath {
* specified in decimal degrees (latitude/longitude). This works correctly
* even if the dateline is between the two points.
* <p>
* Error is at most 2E-1 (20cm) from the actual haversine distance, but is typically
* Error is at most 4E-1 (40cm) from the actual haversine distance, but is typically
* much smaller for reasonable distances: around 1E-5 (0.01mm) for distances less than
* 1000km.
*
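The "actual haversine distance" referenced in this doc comment is the plain spherical haversine formula. Below is a minimal reference sketch for comparison; this is not Lucene's SloppyMath code and not the test's slowHaversin, and the class name and mean-radius constant are assumptions:

// Sketch only: a straightforward haversine to compare haversinMeters() against.
public class ReferenceHaversineSketch {

  // Assumed mean Earth radius in meters; the reference used by the Lucene tests may differ.
  static final double MEAN_EARTH_RADIUS_METERS = 6_371_008.8;

  static double haversineMeters(double lat1, double lon1, double lat2, double lon2) {
    double phi1 = Math.toRadians(lat1);
    double phi2 = Math.toRadians(lat2);
    double dPhi = Math.toRadians(lat2 - lat1);
    double dLambda = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dPhi / 2) * Math.sin(dPhi / 2)
        + Math.cos(phi1) * Math.cos(phi2) * Math.sin(dLambda / 2) * Math.sin(dLambda / 2);
    return 2 * MEAN_EARTH_RADIUS_METERS * Math.asin(Math.sqrt(a));
  }

  public static void main(String[] args) {
    // One degree of latitude at the equator is roughly 111.2 km.
    System.out.println(haversineMeters(0, 0, 1, 0)); // ~111195 meters
  }
}

The widened 4E-1 bound describes the worst case over arbitrary world-scale distances; as the same doc comment notes, the error stays around 1E-5 for distances under 1000km.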

@ -18,6 +18,7 @@ package org.apache.lucene.index;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.compressing.CompressingCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
@ -34,7 +35,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomInts;
/**
* This test creates an index with one segment that is a little larger than 4GB.
*/
@SuppressCodecs({ "SimpleText" })
@SuppressCodecs({ "SimpleText", "Compressing" })
@TimeoutSuite(millis = 4 * TimeUnits.HOUR)
public class Test4GBStoredFields extends LuceneTestCase {
@ -43,13 +44,20 @@ public class Test4GBStoredFields extends LuceneTestCase {
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("4GBStoredFields")));
dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy(false, 10))
.setOpenMode(IndexWriterConfig.OpenMode.CREATE));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
iwc.setRAMBufferSizeMB(256.0);
iwc.setMergeScheduler(new ConcurrentMergeScheduler());
iwc.setMergePolicy(newLogMergePolicy(false, 10));
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
// TODO: we disable "Compressing" since it likes to pick very extreme values which will be too slow for this test.
// maybe we should factor out crazy cases to ExtremeCompressing? then annotations can handle this stuff...
if (random().nextBoolean()) {
iwc.setCodec(CompressingCodec.reasonableInstance(random()));
}
IndexWriter w = new IndexWriter(dir, iwc);
MergePolicy mp = w.getConfig().getMergePolicy();
if (mp instanceof LogByteSizeMergePolicy) {

@ -33,7 +33,7 @@ public class TestSloppyMath extends LuceneTestCase {
// accuracy for asin()
static double ASIN_DELTA = 1E-7;
// accuracy for haversinMeters()
static double HAVERSIN_DELTA = 2E-1;
static double HAVERSIN_DELTA = 38E-2;
// accuracy for haversinMeters() for "reasonable" distances (< 1000km)
static double REASONABLE_HAVERSIN_DELTA = 1E-5;
@ -161,11 +161,28 @@ public class TestSloppyMath extends LuceneTestCase {
double lat2 = GeoTestUtil.nextLatitude();
double lon2 = GeoTestUtil.nextLongitude();
double expected = haversinMeters(lat1, lon1, lat2, lon2);
double actual = slowHaversin(lat1, lon1, lat2, lon2);
double expected = slowHaversin(lat1, lon1, lat2, lon2);
double actual = haversinMeters(lat1, lon1, lat2, lon2);
assertEquals(expected, actual, HAVERSIN_DELTA);
}
}
/**
* Step across the whole world to find huge absolute errors.
* Don't rely on random number generator to pick these massive distances. */
public void testAcrossWholeWorldSteps() {
for (int lat1 = -90; lat1 <= 90; lat1 += 10) {
for (int lon1 = -180; lon1 <= 180; lon1 += 10) {
for (int lat2 = -90; lat2 <= 90; lat2 += 10) {
for (int lon2 = -180; lon2 <= 180; lon2 += 10) {
double expected = slowHaversin(lat1, lon1, lat2, lon2);
double actual = haversinMeters(lat1, lon1, lat2, lon2);
assertEquals(expected, actual, HAVERSIN_DELTA);
}
}
}
}
}
public void testAgainstSlowVersionReasonable() {
for (int i = 0; i < 100_000; i++) {

@ -48,7 +48,7 @@ import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
// relational operations as rectangle <-> rectangle relations in integer space in log(n) time..
final class LatLonGrid {
// must be a power of two!
static final int GRID_SIZE = 1<<5;
static final int GRID_SIZE = 1<<7;
final int minLat;
final int maxLat;
final int minLon;
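Bumping GRID_SIZE from 1<<5 to 1<<7 makes the grid four times finer per axis, i.e. sixteen times more cells. The sketch below shows that scaling plus one generic reason such grids are kept at powers of two, namely that a (latCell, lonCell) pair packs into a single cell index with shifts and masks; this is an illustration only, not the actual LatLonGrid indexing code:

// Sketch only: cell-count scaling and power-of-two index packing (not LatLonGrid internals).
public class GridSizeSketch {

  static final int GRID_SHIFT = 7;            // matches GRID_SIZE = 1 << 7 above
  static final int GRID_SIZE = 1 << GRID_SHIFT;
  static final int GRID_MASK = GRID_SIZE - 1; // cheap modulo for power-of-two sizes

  // Pack a (latCell, lonCell) pair into one index in [0, GRID_SIZE*GRID_SIZE).
  static int cellIndex(int latCell, int lonCell) {
    return ((latCell & GRID_MASK) << GRID_SHIFT) | (lonCell & GRID_MASK);
  }

  public static void main(String[] args) {
    System.out.println("cells before (1<<5): " + (1 << 5) * (1 << 5));   // 1024
    System.out.println("cells after  (1<<7): " + GRID_SIZE * GRID_SIZE); // 16384
    System.out.println("index of cell (3, 5): " + cellIndex(3, 5));      // 389
  }
}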

@ -192,6 +192,7 @@ public final class Geo3DPoint extends Field {
for (final Polygon hole : theHoles) {
holeList.add(fromPolygon(hole, !reverseMe));
}
// Now do the polygon itself
final double[] polyLats = polygon.getPolyLats();
final double[] polyLons = polygon.getPolyLons();

@ -62,6 +62,7 @@ public class GeoPolygonFactory {
public static GeoPolygon makeGeoPolygon(final PlanetModel planetModel,
final List<GeoPoint> pointList,
final List<GeoPolygon> holes) {
//System.err.println("points="+pointList);
// Create a random number generator. Effectively this furnishes us with a repeatable sequence
// of points to use for poles.
final Random generator = new Random(1234);
@ -105,17 +106,20 @@ public class GeoPolygonFactory {
// We don't know if this is the correct siding choice. We will only know as we build the complex polygon.
// So we need to be prepared to try both possibilities.
GeoCompositePolygon rval = new GeoCompositePolygon();
if (buildPolygonShape(rval, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, testPoint) == false) {
MutableBoolean seenConcave = new MutableBoolean();
if (buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, testPoint) == false) {
// The testPoint was within the shape. Was that intended?
if (testPointInside) {
// Yes: build it for real
rval = new GeoCompositePolygon();
buildPolygonShape(rval, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, null);
seenConcave = new MutableBoolean();
buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, initialPlane, holes, null);
return rval;
}
// No: do the complement and return that.
rval = new GeoCompositePolygon();
buildPolygonShape(rval, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
seenConcave = new MutableBoolean();
buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
return rval;
} else {
// The testPoint was outside the shape. Was that intended?
@ -125,7 +129,8 @@ public class GeoPolygonFactory {
}
// No: return the complement
rval = new GeoCompositePolygon();
buildPolygonShape(rval, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
seenConcave = new MutableBoolean();
buildPolygonShape(rval, seenConcave, planetModel, filteredPointList, new BitSet(), 0, 1, new SidedPlane(initialPlane), holes, null);
return rval;
}
}
@ -400,6 +405,7 @@ public class GeoPolygonFactory {
/** Build a GeoPolygon out of one concave part and multiple convex parts given points, starting edge, and whether starting edge is internal or not.
* @param rval is the composite polygon to add to.
* @param seenConcave is true if a concave polygon has been seen in this generation yet.
* @param planetModel is the planet model.
* @param pointsList is a list of the GeoPoints to build an arbitrary polygon out of.
* @param internalEdges specifies which edges are internal.
@ -419,8 +425,9 @@ public class GeoPolygonFactory {
* found in the interior of the shape we create here we return false, which is a signal that we chose
* our initial plane sidedness backwards.
*/
public static boolean buildPolygonShape(
static boolean buildPolygonShape(
final GeoCompositePolygon rval,
final MutableBoolean seenConcave,
final PlanetModel planetModel,
final List<GeoPoint> pointsList,
final BitSet internalEdges,
@ -433,7 +440,8 @@ public class GeoPolygonFactory {
// It could be the case that we need a concave polygon. So we need to try and look for that case
// as part of the general code for constructing complex polygons.
// Note that there can be only one concave polygon.
// Note that there can be only one concave polygon. This code will enforce that condition and will return
// false if it is violated.
// The code here must keep track of two lists of sided planes. The first list contains the planes consistent with
// a concave polygon. This list will grow and shrink. The second list is built starting at the current edge that
@ -585,30 +593,18 @@ public class GeoPolygonFactory {
// This should be the only problematic part of the polygon.
// We know that thePoint is on the "wrong" side of the edge -- that is, it's on the side that the
// edge is pointing at.
final List<GeoPoint> thirdPartPoints = new ArrayList<>();
final List<GeoPoint> thirdPartPoints = new ArrayList<>(3);
final BitSet thirdPartInternal = new BitSet();
thirdPartPoints.add(checkEdge.startPoint);
thirdPartInternal.set(0, checkEdge.isInternal);
thirdPartPoints.add(checkEdge.endPoint);
thirdPartInternal.set(1, true);
thirdPartPoints.add(thePoint);
thirdPartInternal.set(2, true);
//System.out.println("Doing convex part...");
if (buildPolygonShape(rval,
planetModel,
thirdPartPoints,
thirdPartInternal,
0,
1,
checkEdge.plane,
holes,
testPoint) == false) {
return false;
}
//System.out.println("...done convex part.");
assert checkEdge.plane.isWithin(thePoint) : "Point was on wrong side of complementary plane, so must be on the right side of the non-complementary plane!";
final GeoPolygon convexPart = new GeoConvexPolygon(planetModel, thirdPartPoints, holes, thirdPartInternal, true);
//System.out.println("convex part = "+convexPart);
rval.addShape(convexPart);
// ??? check if we get the sense right
// The part preceding the bad edge, back to thePoint, needs to be recursively
// processed. So, assemble what we need, which is basically a list of edges.
Edge loopEdge = edgeBuffer.getPrevious(checkEdge);
@ -626,6 +622,7 @@ public class GeoPolygonFactory {
firstPartInternal.set(i, true);
//System.out.println("Doing first part...");
if (buildPolygonShape(rval,
seenConcave,
planetModel,
firstPartPoints,
firstPartInternal,
@ -653,6 +650,7 @@ public class GeoPolygonFactory {
secondPartInternal.set(i, true);
//System.out.println("Doing second part...");
if (buildPolygonShape(rval,
seenConcave,
planetModel,
secondPartPoints,
secondPartInternal,
@ -673,7 +671,8 @@ public class GeoPolygonFactory {
// No violations found: we know it's a legal concave polygon.
// If there's anything left in the edge buffer, convert to concave polygon.
if (makeConcavePolygon(planetModel, rval, edgeBuffer, holes, testPoint) == false) {
//System.out.println("adding concave part");
if (makeConcavePolygon(planetModel, rval, seenConcave, edgeBuffer, holes, testPoint) == false) {
return false;
}
@ -684,6 +683,7 @@ public class GeoPolygonFactory {
* By this point, if there are any edges in the edgebuffer, they represent a concave polygon.
* @param planetModel is the planet model.
* @param rval is the composite polygon we're building.
* @param seenConcave is true if we've already seen a concave polygon.
* @param edgeBuffer is the edge buffer.
* @param holes is the optional list of holes.
* @param testPoint is the optional test point.
@ -691,13 +691,21 @@ public class GeoPolygonFactory {
*/
private static boolean makeConcavePolygon(final PlanetModel planetModel,
final GeoCompositePolygon rval,
final MutableBoolean seenConcave,
final EdgeBuffer edgeBuffer,
final List<GeoPolygon> holes,
final GeoPoint testPoint) {
if (edgeBuffer.size() == 0) {
return true;
}
if (seenConcave.value) {
throw new IllegalArgumentException("Illegal polygon; polygon edges intersect each other");
}
seenConcave.value = true;
// If there are less than three edges, something got messed up somehow. Don't know how this
// can happen but check.
if (edgeBuffer.size() < 3) {
@ -1107,24 +1115,34 @@ public class GeoPolygonFactory {
*/
public EdgeBuffer(final List<GeoPoint> pointList, final BitSet internalEdges, final int startPlaneStartIndex, final int startPlaneEndIndex, final SidedPlane startPlane) {
/*
System.out.println("Initial points:");
System.out.println("Start plane index: "+startPlaneStartIndex+" End plane index: "+startPlaneEndIndex+" Initial points:");
for (final GeoPoint p : pointList) {
System.out.println(" "+p);
}
*/
// We need to detect backtracks, and also situations where someone has tried to stitch together multiple segments into one long arc (> 180 degrees).
// To do this, every time we extend by a coplanar segment, we compute the total arc distance to the new endpoint, as
// well as a sum of the arc distances we've accumulated as we march forward. If these two numbers disagree, then
// we know there has been a backtrack or other anomaly.
final Edge startEdge = new Edge(pointList.get(startPlaneStartIndex), pointList.get(startPlaneEndIndex), startPlane, internalEdges.get(startPlaneStartIndex));
// Fill in the EdgeBuffer by walking around creating more stuff
Edge currentEdge = startEdge;
int startIndex = startPlaneStartIndex;
int endIndex = startPlaneEndIndex;
while (true) {
/*
System.out.println("For plane "+currentEdge.plane+", the following points are in/out:");
for (final GeoPoint p: pointList) {
System.out.println(" "+p+" is: "+(currentEdge.plane.isWithin(p)?"in":"out"));
}
*/
// Check termination condition
if (currentEdge.endPoint == startEdge.startPoint) {
// We finish here. Link the current edge to the start edge, and exit
previousEdges.put(startEdge, currentEdge);
nextEdges.put(currentEdge, startEdge);
edges.add(startEdge);
break;
}
// Compute the next edge
startIndex = endIndex;
endIndex++;
@ -1134,53 +1152,15 @@ public class GeoPolygonFactory {
// Get the next point
final GeoPoint newPoint = pointList.get(endIndex);
// Build the new edge
// We have to be sure that the point we use as a check does not lie on the plane.
// In order to meet that goal, we need to go hunting for a point that meets the criteria. If we don't
// find one, we've got a linear "polygon" that we cannot use.
// We need to know the sidedness of the new plane. The point we're going to be presenting to it has
// a certain relationship with the sided plane we already have for the current edge. If the current edge
// is colinear with the new edge, then we want to maintain the same relationship. If the new edge
// is not colinear, then we can use the new point's relationship with the current edge as our guide.
final boolean isNewPointWithin;
final GeoPoint pointToPresent;
if (currentEdge.plane.evaluateIsZero(newPoint)) {
// The new point is colinear with the current edge. We'll have to look backwards for the first point that isn't.
int checkPointIndex = -1;
// Compute the arc distance before we try to extend, so that we note backtracking when we see it
//double accumulatedDistance = newPoint.arcDistance(pointList.get(startIndex));
final Plane checkPlane = new Plane(pointList.get(startIndex), newPoint);
for (int i = 0; i < pointList.size(); i++) {
final int index = getLegalIndex(startIndex - 1 - i, pointList.size());
if (!checkPlane.evaluateIsZero(pointList.get(index))) {
checkPointIndex = index;
break;
} else {
//accumulatedDistance += pointList.get(getLegalIndex(index+1, pointList.size())).arcDistance(pointList.get(index));
//final double actualDistance = newPoint.arcDistance(pointList.get(index));
//if (Math.abs(actualDistance - accumulatedDistance) >= Vector.MINIMUM_RESOLUTION) {
// throw new IllegalArgumentException("polygon backtracks over itself");
//}
}
}
if (checkPointIndex == -1) {
throw new IllegalArgumentException("polygon is illegal (linear)");
}
pointToPresent = pointList.get(checkPointIndex);
isNewPointWithin = currentEdge.plane.isWithin(pointToPresent);
} else {
isNewPointWithin = currentEdge.plane.isWithin(newPoint);
pointToPresent = currentEdge.startPoint;
}
final boolean isNewPointWithin = currentEdge.plane.isWithin(newPoint);
final GeoPoint pointToPresent = currentEdge.startPoint;
final SidedPlane newPlane = new SidedPlane(pointToPresent, isNewPointWithin, pointList.get(startIndex), newPoint);
/*
System.out.println("For next plane, the following points are in/out:");
for (final GeoPoint p: pointList) {
System.out.println(" "+p+" is: "+(newPlane.isWithin(p)?"in":"out"));
}
*/
final Edge newEdge = new Edge(pointList.get(startIndex), pointList.get(endIndex), newPlane, internalEdges.get(startIndex));
// Link it in
@ -1189,13 +1169,6 @@ public class GeoPolygonFactory {
edges.add(newEdge);
currentEdge = newEdge;
if (currentEdge.endPoint == startEdge.startPoint) {
// We finish here. Link the current edge to the start edge, and exit
previousEdges.put(startEdge, currentEdge);
nextEdges.put(currentEdge, startEdge);
edges.add(startEdge);
break;
}
}
oneEdge = startEdge;
@ -1313,4 +1286,8 @@ public class GeoPolygonFactory {
}
static class MutableBoolean {
public boolean value = false;
}
}

@ -29,7 +29,7 @@ public class XYZBounds implements Bounds {
* of the shape, and we cannot guarantee that without making MINIMUM_RESOLUTION
* unacceptably large.
*/
private static final double FUDGE_FACTOR = Vector.MINIMUM_RESOLUTION * 2.0;
private static final double FUDGE_FACTOR = Vector.MINIMUM_RESOLUTION * 500.0;
/** Minimum x */
private Double minX = null;

@ -544,10 +544,13 @@ public class TestGeo3DPoint extends LuceneTestCase {
// Polygons
final boolean isClockwise = random().nextDouble() < 0.5;
try {
return Geo3DPoint.newPolygonQuery(field, makePoly(PlanetModel.WGS84,
final Query q = Geo3DPoint.newPolygonQuery(field, makePoly(PlanetModel.WGS84,
new GeoPoint(PlanetModel.WGS84, toRadians(GeoTestUtil.nextLatitude()), toRadians(GeoTestUtil.nextLongitude())),
isClockwise,
true));
//System.err.println("Generated: "+q);
//assertTrue(false);
return q;
} catch (IllegalArgumentException e) {
continue;
}
@ -793,6 +796,10 @@ public class TestGeo3DPoint extends LuceneTestCase {
GeoPoint unquantizedPoint = unquantizedPoints[id];
if (point != null && unquantizedPoint != null) {
GeoShape shape = ((PointInGeo3DShapeQuery)query).getShape();
XYZBounds bounds = new XYZBounds();
shape.getBounds(bounds);
XYZSolid solid = XYZSolidFactory.makeXYZSolid(PlanetModel.WGS84, bounds.getMinimumX(), bounds.getMaximumX(), bounds.getMinimumY(), bounds.getMaximumY(), bounds.getMinimumZ(), bounds.getMaximumZ());
boolean expected = ((deleted.contains(id) == false) && shape.isWithin(point));
if (hits.get(docID) != expected) {
StringBuilder b = new StringBuilder();
@ -801,13 +808,14 @@ public class TestGeo3DPoint extends LuceneTestCase {
} else {
b.append("FAIL: id=" + id + " should not have matched but did\n");
}
b.append(" shape=" + ((PointInGeo3DShapeQuery)query).getShape() + "\n");
b.append(" shape=" + shape + "\n");
b.append(" bounds=" + bounds + "\n");
b.append(" world bounds=(" +
" minX=" + PlanetModel.WGS84.getMinimumXValue() + " maxX=" + PlanetModel.WGS84.getMaximumXValue() +
" minY=" + PlanetModel.WGS84.getMinimumYValue() + " maxY=" + PlanetModel.WGS84.getMaximumYValue() +
" minZ=" + PlanetModel.WGS84.getMinimumZValue() + " maxZ=" + PlanetModel.WGS84.getMaximumZValue() + "\n");
b.append(" quantized point=" + point + " within shape? "+shape.isWithin(point)+"\n");
b.append(" unquantized point=" + unquantizedPoint + " within shape? "+shape.isWithin(unquantizedPoint)+"\n");
b.append(" quantized point=" + point + " within shape? "+shape.isWithin(point)+" within bounds? "+solid.isWithin(point)+"\n");
b.append(" unquantized point=" + unquantizedPoint + " within shape? "+shape.isWithin(unquantizedPoint)+" within bounds? "+solid.isWithin(unquantizedPoint)+"\n");
b.append(" docID=" + docID + " deleted?=" + deleted.contains(id) + "\n");
b.append(" query=" + query + "\n");
b.append(" explanation:\n " + explain("point", shape, point, unquantizedPoint, r, docID).replace("\n", "\n "));
@ -880,6 +888,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
* clockwise/counterclockwise rotation that way.
*/
protected static Polygon makePoly(final PlanetModel pm, final GeoPoint pole, final boolean clockwiseDesired, final boolean createHoles) {
// Polygon edges will be arranged around the provided pole, and holes will each have a pole selected within the parent
// polygon.
final int pointCount = TestUtil.nextInt(random(), 3, 10);

@ -439,7 +439,76 @@ shape:
points.add(new GeoPoint(-0.1699323603224724, 0.8516746480592872, 0.4963385521664198));
points.add(new GeoPoint(0.2654788898359613, 0.7380222309164597, 0.6200740473100581));
final GeoPolygon p = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null);
boolean illegalArgumentException = false;
try {
final GeoPolygon p = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null);
} catch (IllegalArgumentException e) {
illegalArgumentException = true;
}
assertTrue(illegalArgumentException);
}
@Test
public void testPolygonFactoryCase2() {
/*
[[lat=-0.48522750470337056, lon=-1.7370471071224087([X=-0.14644023172524287, Y=-0.8727091042681705, Z=-0.4665895520487907])],
[lat=-0.4252164254406539, lon=-1.0929282311747601([X=0.41916238097763436, Y=-0.8093435958043177, Z=-0.4127428785664968])],
[lat=0.2055150822737076, lon=0.8094775925193464([X=0.6760197133035871, Y=0.7093859395658346, Z=0.20427109186920892])],
[lat=-0.504360159046884, lon=-1.27628468850318([X=0.25421329462858633, Y=-0.8380671569889917, Z=-0.4834077932502288])],
[lat=-0.11994023948700858, lon=0.07857194136150605([X=0.9908123546871113, Y=0.07801065055912473, Z=-0.11978097184039621])],
[lat=0.39346633764155237, lon=1.306697331415816([X=0.24124272064589647, Y=0.8921189226448045, Z=0.3836311592666308])],
[lat=-0.07741593942416389, lon=0.5334693210962216([X=0.8594122640512101, Y=0.50755758923985, Z=-0.07742360418968308])],
[lat=0.4654236264787552, lon=1.3013260557429494([X=0.2380080413677112, Y=0.8617612419312584, Z=0.4489988990508502])],
[lat=-1.2964641581620537, lon=-1.487600369139357([X=0.022467282495493006, Y=-0.26942922375508405, Z=-0.960688317984634])]]
*/
final List<GeoPoint> points = new ArrayList<>();
points.add(new GeoPoint(PlanetModel.WGS84, -0.48522750470337056, -1.7370471071224087));
points.add(new GeoPoint(PlanetModel.WGS84, -0.4252164254406539, -1.0929282311747601));
points.add(new GeoPoint(PlanetModel.WGS84, 0.2055150822737076, 0.8094775925193464));
points.add(new GeoPoint(PlanetModel.WGS84, -0.504360159046884, -1.27628468850318));
points.add(new GeoPoint(PlanetModel.WGS84, -0.11994023948700858, 0.07857194136150605));
points.add(new GeoPoint(PlanetModel.WGS84, 0.39346633764155237, 1.306697331415816));
points.add(new GeoPoint(PlanetModel.WGS84, -0.07741593942416389, 0.5334693210962216));
points.add(new GeoPoint(PlanetModel.WGS84, 0.4654236264787552, 1.3013260557429494));
points.add(new GeoPoint(PlanetModel.WGS84, -1.2964641581620537, -1.487600369139357));
boolean illegalArgumentException = false;
try {
final GeoPolygon p = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, null);
} catch (IllegalArgumentException e) {
illegalArgumentException = true;
}
assertTrue(illegalArgumentException);
}
@Test
public void testPolygonFactoryCase3() {
/*
This one failed to be detected as convex:
[junit4] 1> convex part = GeoConvexPolygon: {planetmodel=PlanetModel.WGS84, points=
[[lat=0.39346633764155237, lon=1.306697331415816([X=0.24124272064589647, Y=0.8921189226448045, Z=0.3836311592666308])],
[lat=-0.4252164254406539, lon=-1.0929282311747601([X=0.41916238097763436, Y=-0.8093435958043177, Z=-0.4127428785664968])],
[lat=0.4654236264787552, lon=1.3013260557429494([X=0.2380080413677112, Y=0.8617612419312584, Z=0.4489988990508502])]], internalEdges={0, 1, 2}}
*/
final GeoPoint p3 = new GeoPoint(PlanetModel.WGS84, 0.39346633764155237, 1.306697331415816);
final GeoPoint p2 = new GeoPoint(PlanetModel.WGS84, -0.4252164254406539, -1.0929282311747601);
final GeoPoint p1 = new GeoPoint(PlanetModel.WGS84, 0.4654236264787552, 1.3013260557429494);
final List<GeoPoint> points = new ArrayList<>();
points.add(p3);
points.add(p2);
points.add(p1);
final BitSet internal = new BitSet();
final GeoCompositePolygon rval = new GeoCompositePolygon();
final GeoPolygonFactory.MutableBoolean mutableBoolean = new GeoPolygonFactory.MutableBoolean();
boolean result = GeoPolygonFactory.buildPolygonShape(rval, mutableBoolean, PlanetModel.WGS84, points, internal, 0, 1,
new SidedPlane(p1, p3, p2), new ArrayList<GeoPolygon>(), null);
assertFalse(mutableBoolean.value);
}
}

@ -60,6 +60,19 @@ public abstract class CompressingCodec extends FilterCodec {
final int blockSize = random.nextBoolean() ? RandomInts.randomIntBetween(random, 1, 10) : RandomInts.randomIntBetween(random, 1, 1024);
return randomInstance(random, chunkSize, chunkDocs, false, blockSize);
}
/**
* Creates a random {@link CompressingCodec} with more reasonable parameters for big tests.
*/
public static CompressingCodec reasonableInstance(Random random) {
// e.g. defaults use 2^14 for FAST and ~ 2^16 for HIGH
final int chunkSize = TestUtil.nextInt(random, 1<<13, 1<<17);
// e.g. defaults use 128 for FAST and 512 for HIGH
final int chunkDocs = TestUtil.nextInt(random, 1<<6, 1<<10);
// e.g. defaults use 1024 for both cases
final int blockSize = TestUtil.nextInt(random, 1<<9, 1<<11);
return randomInstance(random, chunkSize, chunkDocs, false, blockSize);
}
/**
* Creates a random {@link CompressingCodec} that is using a segment suffix

@ -98,6 +98,9 @@ Bug Fixes
* SOLR-9004: Fix "name" field type definition in films example. (Alexandre Rafalovitch via Varun Thacker)
* SOLR-8983: Cleanup clusterstate and replicas for a failed create collection request
(Varun Thacker, Anshum Gupta)
Optimizations
----------------------
* SOLR-8722: Don't force a full ZkStateReader refresh on every Overseer operation.
@ -610,6 +613,9 @@ Bug Fixes
* SOLR-8908: Fix to OnReconnect listener registration to allow listeners to deregister, such
as when a core is reloaded or deleted to avoid a memory leak. (Timothy Potter)
* SOLR-9007: Remove mention of the managed_schema_configs as valid config directory when creating
the collection for the SolrCloud example. (Timothy Potter)
======================= 5.5.0 =======================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release

@ -1961,10 +1961,16 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
}
processResponses(results, shardHandler, false, null, async, requestMap, Collections.emptySet());
log.debug("Finished create command on all shards for collection: "
+ collectionName);
if(results.get("failure") != null && ((SimpleOrderedMap)results.get("failure")).size() > 0) {
// Let's cleanup as we hit an exception
// We shouldn't be passing 'results' here for the cleanup as the response would then contain 'success'
// element, which may be interpreted by the user as a positive ack
cleanupCollection(collectionName, new NamedList());
log.info("Cleaned up artifacts for failed create collection for [" + collectionName + "]");
} else {
log.debug("Finished create command on all shards for collection: "
+ collectionName);
}
} catch (SolrException ex) {
throw ex;
} catch (Exception ex) {
@ -1972,6 +1978,15 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
}
}
private void cleanupCollection(String collectionName, NamedList results) throws KeeperException, InterruptedException {
log.error("Cleaning up collection [" + collectionName + "]." );
Map<String, Object> props = makeMap(
Overseer.QUEUE_OPERATION, DELETE.toLower(),
NAME, collectionName);
deleteCollection(new ZkNodeProps(props), results);
}
private Map<Position, String> identifyNodes(ClusterState clusterState,
List<String> nodeList,
ZkNodeProps message,

@ -77,7 +77,7 @@ import org.apache.solr.util.plugin.SolrCoreAware;
* In the ExampleFieldMutatingUpdateProcessorFactory configured below,
* fields will be mutated if the name starts with "foo" <i>or</i> "bar";
* <b>unless</b> the field name contains the substring "SKIP" <i>or</i>
* the fieldType is (or subclasses) DateField. Meaning a field named
* the fieldType is (or subclasses) TrieDateField. Meaning a field named
* "foo_SKIP" is guaranteed not to be selected, but a field named "bar_smith"
* that uses StrField will be selected.
* </p>

@ -2638,14 +2638,13 @@ public class SolrCLI {
"How many replicas per shard would you like to create? [2] ", "a replication factor", 2, 1, 4);
echo("Please choose a configuration for the "+collectionName+" collection, available options are:");
cloudConfig =
prompt(readInput, "basic_configs, data_driven_schema_configs, sample_techproducts_configs, or managed_schema_configs ["+cloudConfig+"] ", cloudConfig);
String validConfigs = "basic_configs, data_driven_schema_configs, or sample_techproducts_configs ["+cloudConfig+"] ";
cloudConfig = prompt(readInput, validConfigs, cloudConfig);
// validate the cloudConfig name
while (!isValidConfig(configsetsDir, cloudConfig)) {
echo(cloudConfig+" is not a valid configuration directory! Please choose a configuration for the "+collectionName+" collection, available options are:");
cloudConfig =
prompt(readInput, "basic_configs, data_driven_schema_configs, sample_techproducts_configs, or managed_schema_configs ["+cloudConfig+"] ", cloudConfig);
cloudConfig = prompt(readInput, validConfigs, cloudConfig);
}
} else {
// must verify if default collection exists

@ -26,8 +26,19 @@ import java.lang.management.ManagementFactory;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.LuceneTestCase.Slow;
@ -39,7 +50,6 @@ import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
@ -374,6 +384,14 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
}
private NamedList<Object> makeRequest(String baseUrl, SolrRequest request, int socketTimeout)
throws SolrServerException, IOException {
try (SolrClient client = createNewSolrClient("", baseUrl)) {
((HttpSolrClient) client).setSoTimeout(socketTimeout);
return client.request(request);
}
}
private NamedList<Object> makeRequest(String baseUrl, SolrRequest request)
throws SolrServerException, IOException {
try (SolrClient client = createNewSolrClient("", baseUrl)) {
@ -524,8 +542,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
params.set(OverseerCollectionMessageHandler.CREATE_NODE_SET, nn1 + "," + nn2);
request = new QueryRequest(params);
request.setPath("/admin/collections");
gotExp = false;
NamedList<Object> resp = makeRequest(baseUrl, request);;
NamedList<Object> resp = makeRequest(baseUrl, request, 60000);
SimpleOrderedMap success = (SimpleOrderedMap) resp.get("success");
SimpleOrderedMap failure = (SimpleOrderedMap) resp.get("failure");

@ -0,0 +1,84 @@
package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.Properties;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.common.params.CoreAdminParams;
import org.junit.BeforeClass;
import org.junit.Test;
public class CreateCollectionCleanupTest extends SolrCloudTestCase {
protected static final String CLOUD_SOLR_XML_WITH_10S_CREATE_COLL_WAIT = "<solr>\n" +
"\n" +
" <str name=\"shareSchema\">${shareSchema:false}</str>\n" +
" <str name=\"configSetBaseDir\">${configSetBaseDir:configsets}</str>\n" +
" <str name=\"coreRootDirectory\">${coreRootDirectory:.}</str>\n" +
"\n" +
" <shardHandlerFactory name=\"shardHandlerFactory\" class=\"HttpShardHandlerFactory\">\n" +
" <str name=\"urlScheme\">${urlScheme:}</str>\n" +
" <int name=\"socketTimeout\">${socketTimeout:90000}</int>\n" +
" <int name=\"connTimeout\">${connTimeout:15000}</int>\n" +
" </shardHandlerFactory>\n" +
"\n" +
" <solrcloud>\n" +
" <str name=\"host\">127.0.0.1</str>\n" +
" <int name=\"hostPort\">${hostPort:8983}</int>\n" +
" <str name=\"hostContext\">${hostContext:solr}</str>\n" +
" <int name=\"zkClientTimeout\">${solr.zkclienttimeout:30000}</int>\n" +
" <bool name=\"genericCoreNodeNames\">${genericCoreNodeNames:true}</bool>\n" +
" <int name=\"leaderVoteWait\">10000</int>\n" +
" <int name=\"distribUpdateConnTimeout\">${distribUpdateConnTimeout:45000}</int>\n" +
" <int name=\"distribUpdateSoTimeout\">${distribUpdateSoTimeout:340000}</int>\n" +
" <int name=\"createCollectionWaitTimeTillActive\">${createCollectionWaitTimeTillActive:10}</int>\n" +
" </solrcloud>\n" +
" \n" +
"</solr>\n";
@BeforeClass
public static void createCluster() throws Exception {
configureCluster(1)
.addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
.withSolrXml(CLOUD_SOLR_XML_WITH_10S_CREATE_COLL_WAIT)
.configure();
}
@Test
public void testCreateCollectionCleanup() throws Exception {
final CloudSolrClient cloudClient = cluster.getSolrClient();
// Create a collection that would fail
CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection("foo","conf1",1,1);
Properties properties = new Properties();
properties.put(CoreAdminParams.DATA_DIR, "/some_invalid_dir/foo");
create.setProperties(properties);
CollectionAdminResponse rsp = create.process(cloudClient);
assertFalse(rsp.isSuccess());
// Confirm using LIST that the collection does not exist
CollectionAdminRequest.List list = CollectionAdminRequest.listCollections();
rsp = list.process(cloudClient);
assertFalse(((ArrayList) rsp.getResponse().get("collections")).contains("foo"));
}
}