mirror of https://github.com/apache/lucene.git
LUCENE-6766: Merge branch 'master' into index_sort
commit 849fd54f2c
@@ -119,38 +119,33 @@ class GeoComplexPolygon extends GeoBasePolygon {
   @Override
   public boolean isWithin(final double x, final double y, final double z) {
-    return isWithin(new Vector(x, y, z));
-  }
-
-  @Override
-  public boolean isWithin(final Vector thePoint) {
     // If we're right on top of the point, we know the answer.
-    if (testPoint.isNumericallyIdentical(thePoint)) {
+    if (testPoint.isNumericallyIdentical(x, y, z)) {
       return testPointInSet;
     }

     // If we're right on top of any of the test planes, we navigate solely on that plane.
-    if (testPointFixedYPlane.evaluateIsZero(thePoint)) {
+    if (testPointFixedYPlane.evaluateIsZero(x, y, z)) {
       // Use the XZ plane exclusively.
-      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedYPlane, testPointFixedYAbovePlane, testPointFixedYBelowPlane, testPoint, thePoint);
+      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedYPlane, testPointFixedYAbovePlane, testPointFixedYBelowPlane, x, y, z);
       // Traverse our way from the test point to the check point.  Use the y tree because that's fixed.
       if (!yTree.traverse(crossingEdgeIterator, testPoint.y)) {
         // Endpoint is on edge
         return true;
       }
       return ((crossingEdgeIterator.crossingCount & 1) == 0)?testPointInSet:!testPointInSet;
-    } else if (testPointFixedXPlane.evaluateIsZero(thePoint)) {
+    } else if (testPointFixedXPlane.evaluateIsZero(x, y, z)) {
       // Use the YZ plane exclusively.
-      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedXPlane, testPointFixedXAbovePlane, testPointFixedXBelowPlane, testPoint, thePoint);
+      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedXPlane, testPointFixedXAbovePlane, testPointFixedXBelowPlane, x, y, z);
       // Traverse our way from the test point to the check point.  Use the x tree because that's fixed.
       if (!xTree.traverse(crossingEdgeIterator, testPoint.x)) {
         // Endpoint is on edge
         return true;
       }
       return ((crossingEdgeIterator.crossingCount & 1) == 0)?testPointInSet:!testPointInSet;
-    } else if (testPointFixedZPlane.evaluateIsZero(thePoint)) {
+    } else if (testPointFixedZPlane.evaluateIsZero(x, y, z)) {
       // Use the XY plane exclusively.
-      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedZPlane, testPointFixedZAbovePlane, testPointFixedZBelowPlane, testPoint, thePoint);
+      final LinearCrossingEdgeIterator crossingEdgeIterator = new LinearCrossingEdgeIterator(testPointFixedZPlane, testPointFixedZAbovePlane, testPointFixedZBelowPlane, x, y, z);
       // Traverse our way from the test point to the check point.  Use the z tree because that's fixed.
       if (!zTree.traverse(crossingEdgeIterator, testPoint.z)) {
         // Endpoint is on edge
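A note on the parity expression used throughout this method: the (crossingCount & 1) test is the classic even/odd ray-casting rule. Walking from a point whose in-set status is known to the query point, an even number of edge crossings leaves the status unchanged and an odd number flips it. A minimal standalone sketch of the rule (illustrative only, not code from this commit):

    // Even/odd (ray-casting) point-in-polygon rule.
    static boolean inSet(boolean testPointInSet, int crossingCount) {
      // An even crossing count leaves us on the same side as the test point;
      // an odd count flips it.
      return ((crossingCount & 1) == 0) ? testPointInSet : !testPointInSet;
    }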
@@ -159,10 +154,13 @@ class GeoComplexPolygon extends GeoBasePolygon {
       return ((crossingEdgeIterator.crossingCount & 1) == 0)?testPointInSet:!testPointInSet;
     } else {

+      // This is the expensive part!!
+      // Changing the code below has an enormous impact on the queries per second we see with the benchmark.
+
       // We need to use two planes to get there.  We don't know which two planes will do it but we can figure it out.
-      final Plane travelPlaneFixedX = new Plane(1.0, 0.0, 0.0, -thePoint.x);
-      final Plane travelPlaneFixedY = new Plane(0.0, 1.0, 0.0, -thePoint.y);
-      final Plane travelPlaneFixedZ = new Plane(0.0, 0.0, 1.0, -thePoint.z);
+      final Plane travelPlaneFixedX = new Plane(1.0, 0.0, 0.0, -x);
+      final Plane travelPlaneFixedY = new Plane(0.0, 1.0, 0.0, -y);
+      final Plane travelPlaneFixedZ = new Plane(0.0, 0.0, 1.0, -z);

       // Find the intersection points for each one of these and the complementary test point planes.
       final GeoPoint[] XIntersectionsY = travelPlaneFixedX.findIntersections(planetModel, testPointFixedYPlane);
@@ -186,11 +184,19 @@ class GeoComplexPolygon extends GeoBasePolygon {

       for (final GeoPoint p : XIntersectionsY) {
         // Travel would be in YZ plane (fixed x) then in XZ (fixed y)
-        final double newDistance = Math.abs(testPoint.x - p.x) + Math.abs(thePoint.y - p.y);
+        // We compute distance we need to travel as a placeholder for the number of intersections we might encounter.
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.x - p.x;
+        final double tpDelta2 = testPoint.z - p.z;
+        final double cpDelta1 = y - p.y;
+        final double cpDelta2 = z - p.z;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.x - p.x) * (testPoint.x - p.x) + (testPoint.z - p.z) * (testPoint.z - p.z)  + (thePoint.y - p.y) * (thePoint.y - p.y) + (thePoint.z - p.z) * (thePoint.z - p.z);
+        //final double newDistance = Math.abs(testPoint.x - p.x) + Math.abs(thePoint.y - p.y);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.y;
-          secondLegValue = thePoint.x;
+          secondLegValue = x;
           firstLegPlane = testPointFixedYPlane;
           firstLegAbovePlane = testPointFixedYAbovePlane;
           firstLegBelowPlane = testPointFixedYBelowPlane;

@@ -202,11 +208,18 @@ class GeoComplexPolygon extends GeoBasePolygon {
       }
       for (final GeoPoint p : XIntersectionsZ) {
         // Travel would be in YZ plane (fixed x) then in XY (fixed z)
-        final double newDistance = Math.abs(testPoint.x - p.x) + Math.abs(thePoint.z - p.z);
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.x - p.x;
+        final double tpDelta2 = testPoint.y - p.y;
+        final double cpDelta1 = y - p.y;
+        final double cpDelta2 = z - p.z;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.x - p.x) * (testPoint.x - p.x) + (testPoint.y - p.y) * (testPoint.y - p.y)  + (thePoint.y - p.y) * (thePoint.y - p.y) + (thePoint.z - p.z) * (thePoint.z - p.z);
+        //final double newDistance = Math.abs(testPoint.x - p.x) + Math.abs(thePoint.z - p.z);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.z;
-          secondLegValue = thePoint.x;
+          secondLegValue = x;
           firstLegPlane = testPointFixedZPlane;
           firstLegAbovePlane = testPointFixedZAbovePlane;
           firstLegBelowPlane = testPointFixedZBelowPlane;

@@ -218,11 +231,18 @@ class GeoComplexPolygon extends GeoBasePolygon {
       }
       for (final GeoPoint p : YIntersectionsX) {
         // Travel would be in XZ plane (fixed y) then in YZ (fixed x)
-        final double newDistance = Math.abs(testPoint.y - p.y) + Math.abs(thePoint.x - p.x);
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.y - p.y;
+        final double tpDelta2 = testPoint.z - p.z;
+        final double cpDelta1 = x - p.x;
+        final double cpDelta2 = z - p.z;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.y - p.y) * (testPoint.y - p.y) + (testPoint.z - p.z) * (testPoint.z - p.z)  + (thePoint.x - p.x) * (thePoint.x - p.x) + (thePoint.z - p.z) * (thePoint.z - p.z);
+        //final double newDistance = Math.abs(testPoint.y - p.y) + Math.abs(thePoint.x - p.x);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.x;
-          secondLegValue = thePoint.y;
+          secondLegValue = y;
           firstLegPlane = testPointFixedXPlane;
           firstLegAbovePlane = testPointFixedXAbovePlane;
           firstLegBelowPlane = testPointFixedXBelowPlane;

@@ -234,11 +254,18 @@ class GeoComplexPolygon extends GeoBasePolygon {
       }
       for (final GeoPoint p : YIntersectionsZ) {
         // Travel would be in XZ plane (fixed y) then in XY (fixed z)
-        final double newDistance = Math.abs(testPoint.y - p.y) + Math.abs(thePoint.z - p.z);
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.x - p.x;
+        final double tpDelta2 = testPoint.y - p.y;
+        final double cpDelta1 = x - p.x;
+        final double cpDelta2 = z - p.z;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.x - p.x) * (testPoint.x - p.x) + (testPoint.y - p.y) * (testPoint.y - p.y)  + (thePoint.x - p.x) * (thePoint.x - p.x) + (thePoint.z - p.z) * (thePoint.z - p.z);
+        //final double newDistance = Math.abs(testPoint.y - p.y) + Math.abs(thePoint.z - p.z);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.z;
-          secondLegValue = thePoint.y;
+          secondLegValue = y;
           firstLegPlane = testPointFixedZPlane;
           firstLegAbovePlane = testPointFixedZAbovePlane;
           firstLegBelowPlane = testPointFixedZBelowPlane;

@@ -250,11 +277,18 @@ class GeoComplexPolygon extends GeoBasePolygon {
       }
       for (final GeoPoint p : ZIntersectionsX) {
         // Travel would be in XY plane (fixed z) then in YZ (fixed x)
-        final double newDistance = Math.abs(testPoint.z - p.z) + Math.abs(thePoint.x - p.x);
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.y - p.y;
+        final double tpDelta2 = testPoint.z - p.z;
+        final double cpDelta1 = y - p.y;
+        final double cpDelta2 = x - p.x;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.y - p.y) * (testPoint.y - p.y) + (testPoint.z - p.z) * (testPoint.z - p.z)  + (thePoint.y - p.y) * (thePoint.y - p.y) + (thePoint.x - p.x) * (thePoint.x - p.x);
+        //final double newDistance = Math.abs(testPoint.z - p.z) + Math.abs(thePoint.x - p.x);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.x;
-          secondLegValue = thePoint.z;
+          secondLegValue = z;
           firstLegPlane = testPointFixedXPlane;
           firstLegAbovePlane = testPointFixedXAbovePlane;
           firstLegBelowPlane = testPointFixedXBelowPlane;

@@ -266,11 +300,18 @@ class GeoComplexPolygon extends GeoBasePolygon {
       }
       for (final GeoPoint p : ZIntersectionsY) {
         // Travel would be in XY plane (fixed z) then in XZ (fixed y)
-        final double newDistance = Math.abs(testPoint.z - p.z) + Math.abs(thePoint.y - p.y);
+        //final double newDistance = p.arcDistance(testPoint) + p.arcDistance(thePoint);
+        final double tpDelta1 = testPoint.x - p.x;
+        final double tpDelta2 = testPoint.z - p.z;
+        final double cpDelta1 = y - p.y;
+        final double cpDelta2 = x - p.x;
+        final double newDistance = tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
+        //final double newDistance = (testPoint.x - p.x) * (testPoint.x - p.x) + (testPoint.z - p.z) * (testPoint.z - p.z)  + (thePoint.y - p.y) * (thePoint.y - p.y) + (thePoint.x - p.x) * (thePoint.x - p.x);
+        //final double newDistance = Math.abs(testPoint.z - p.z) + Math.abs(thePoint.y - p.y);
         if (newDistance < bestDistance) {
           bestDistance = newDistance;
           firstLegValue = testPoint.y;
-          secondLegValue = thePoint.z;
+          secondLegValue = z;
           firstLegPlane = testPointFixedYPlane;
           firstLegAbovePlane = testPointFixedYAbovePlane;
           firstLegBelowPlane = testPointFixedYBelowPlane;
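All six candidate loops above apply the same optimization: the exact arcDistance computation (left commented out) is replaced by a sum of squared coordinate deltas. Only the relative ordering of candidates matters when picking the best travel path, so any monotonic proxy for distance suffices and the acos/sqrt work can be skipped. A sketch of the heuristic (illustrative names, not code from this commit):

    // Rank candidate intersection points by a cheap proxy for path length.
    // Exact arc distances are unnecessary: only the ordering matters here.
    static double pathCost(double tpDelta1, double tpDelta2, double cpDelta1, double cpDelta2) {
      // Sum of squared deltas; avoids the acos/sqrt work of a true arc distance.
      return tpDelta1 * tpDelta1 + tpDelta2 * tpDelta2 + cpDelta1 * cpDelta1 + cpDelta2 * cpDelta2;
    }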
@@ -284,7 +325,7 @@ class GeoComplexPolygon extends GeoBasePolygon {
       assert bestDistance > 0.0 : "Best distance should not be zero unless on single plane";
       assert bestDistance < Double.MAX_VALUE : "Couldn't find an intersection point of any kind";

-      final DualCrossingEdgeIterator edgeIterator = new DualCrossingEdgeIterator(firstLegPlane, firstLegAbovePlane, firstLegBelowPlane, secondLegPlane, testPoint, thePoint, intersectionPoint);
+      final DualCrossingEdgeIterator edgeIterator = new DualCrossingEdgeIterator(firstLegPlane, firstLegAbovePlane, firstLegBelowPlane, secondLegPlane, x, y, z, intersectionPoint);
       if (!firstLegTree.traverse(edgeIterator, firstLegValue)) {
         return true;
       }
@@ -662,23 +703,27 @@ class GeoComplexPolygon extends GeoBasePolygon {
     private final Plane belowPlane;
     private final Membership bound1;
     private final Membership bound2;
-    private final Vector thePoint;
+    private final double thePointX;
+    private final double thePointY;
+    private final double thePointZ;

     public int crossingCount = 0;

-    public LinearCrossingEdgeIterator(final Plane plane, final Plane abovePlane, final Plane belowPlane, final Vector testPoint, final Vector thePoint) {
+    public LinearCrossingEdgeIterator(final Plane plane, final Plane abovePlane, final Plane belowPlane, final double thePointX, final double thePointY, final double thePointZ) {
       this.plane = plane;
       this.abovePlane = abovePlane;
       this.belowPlane = belowPlane;
-      this.bound1 = new SidedPlane(thePoint, plane, testPoint);
-      this.bound2 = new SidedPlane(testPoint, plane, thePoint);
-      this.thePoint = thePoint;
+      this.bound1 = new SidedPlane(thePointX, thePointY, thePointZ, plane, testPoint);
+      this.bound2 = new SidedPlane(testPoint, plane, thePointX, thePointY, thePointZ);
+      this.thePointX = thePointX;
+      this.thePointY = thePointY;
+      this.thePointZ = thePointZ;
     }

     @Override
     public boolean matches(final Edge edge) {
       // Early exit if the point is on the edge.
-      if (thePoint != null && edge.plane.evaluateIsZero(thePoint) && edge.startPlane.isWithin(thePoint) && edge.endPlane.isWithin(thePoint)) {
+      if (edge.plane.evaluateIsZero(thePointX, thePointY, thePointZ) && edge.startPlane.isWithin(thePointX, thePointY, thePointZ) && edge.endPlane.isWithin(thePointX, thePointY, thePointZ)) {
         return false;
       }
       final GeoPoint[] crossingPoints = plane.findCrossings(planetModel, edge.plane, bound1, bound2, edge.startPlane, edge.endPlane);
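The constructor change above is the recurring theme of this patch: the check point travels as three primitive doubles rather than as a Vector, eliminating a per-query allocation on a hot path. The general pattern, sketched with illustrative names (not this class's real API):

    final class PointTest {
      // Primitive-coordinate path: nothing allocated per call.
      static boolean isWithinBox(double x, double y, double z) {
        return Math.abs(x) <= 1.0 && Math.abs(y) <= 1.0 && Math.abs(z) <= 1.0;
      }
      // Convenience overload for callers that already hold a vector-like object;
      // only they pay for the indirection.
      static boolean isWithinBox(double[] v) {
        return isWithinBox(v[0], v[1], v[2]);
      }
    }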
@@ -815,12 +860,12 @@ class GeoComplexPolygon extends GeoBasePolygon {
     private boolean isSecondLeg = false;

     private final Plane testPointPlane;
-    private final Plane testPointInsidePlane;
-    private final Plane testPointOutsidePlane;
+    private final Plane testPointAbovePlane;
+    private final Plane testPointBelowPlane;
     private final Plane travelPlane;
-    private final Plane travelInsidePlane;
-    private final Plane travelOutsidePlane;
-    private final Vector thePoint;
+    private final double thePointX;
+    private final double thePointY;
+    private final double thePointZ;

     private final GeoPoint intersectionPoint;

@@ -829,16 +874,29 @@ class GeoComplexPolygon extends GeoBasePolygon {
     private final SidedPlane testPointOtherCutoffPlane;
     private final SidedPlane checkPointOtherCutoffPlane;

-    private final SidedPlane insideTestPointCutoffPlane;
-    private final SidedPlane insideTravelCutoffPlane;
+    // These are computed on an as-needed basis
+
+    private boolean computedInsideOutside = false;
+    private Plane testPointInsidePlane;
+    private Plane testPointOutsidePlane;
+    private Plane travelInsidePlane;
+    private Plane travelOutsidePlane;
+    private SidedPlane insideTestPointCutoffPlane;
+    private SidedPlane insideTravelCutoffPlane;
+
+    // The counter

     public int crossingCount = 0;

     public DualCrossingEdgeIterator(final Plane testPointPlane, final Plane testPointAbovePlane, final Plane testPointBelowPlane,
-                                    final Plane travelPlane, final Vector testPoint, final Vector thePoint, final GeoPoint intersectionPoint) {
+                                    final Plane travelPlane, final double thePointX, final double thePointY, final double thePointZ, final GeoPoint intersectionPoint) {
       this.testPointPlane = testPointPlane;
       this.testPointAbovePlane = testPointAbovePlane;
       this.testPointBelowPlane = testPointBelowPlane;
       this.travelPlane = travelPlane;
-      this.thePoint = thePoint;
+      this.thePointX = thePointX;
+      this.thePointY = thePointY;
+      this.thePointZ = thePointZ;
       this.intersectionPoint = intersectionPoint;

       //System.err.println("Intersection point = "+intersectionPoint);
@@ -847,73 +905,74 @@ class GeoComplexPolygon extends GeoBasePolygon {
       assert testPointPlane.evaluateIsZero(intersectionPoint) : "intersection point must be on test point plane";

       assert !testPoint.isNumericallyIdentical(intersectionPoint) : "test point is the same as intersection point";
-      assert !thePoint.isNumericallyIdentical(intersectionPoint) : "check point is same is intersection point";
+      assert !intersectionPoint.isNumericallyIdentical(thePointX, thePointY, thePointZ) : "check point is same is intersection point";

       this.testPointCutoffPlane = new SidedPlane(intersectionPoint, testPointPlane, testPoint);
-      this.checkPointCutoffPlane = new SidedPlane(intersectionPoint, travelPlane, thePoint);
+      this.checkPointCutoffPlane = new SidedPlane(intersectionPoint, travelPlane, thePointX, thePointY, thePointZ);
       this.testPointOtherCutoffPlane = new SidedPlane(testPoint, testPointPlane, intersectionPoint);
-      this.checkPointOtherCutoffPlane = new SidedPlane(thePoint, travelPlane, intersectionPoint);
-
-      // Convert travel plane to a sided plane
-      final Membership intersectionBound1 = new SidedPlane(testPoint, travelPlane, travelPlane.D);
-      // Convert testPoint plane to a sided plane
-      final Membership intersectionBound2 = new SidedPlane(thePoint, testPointPlane, testPointPlane.D);
+      this.checkPointOtherCutoffPlane = new SidedPlane(thePointX, thePointY, thePointZ, travelPlane, intersectionPoint);

       // Sanity check
       assert testPointCutoffPlane.isWithin(intersectionPoint) : "intersection must be within testPointCutoffPlane";
       assert testPointOtherCutoffPlane.isWithin(intersectionPoint) : "intersection must be within testPointOtherCutoffPlane";
       assert checkPointCutoffPlane.isWithin(intersectionPoint) : "intersection must be within checkPointCutoffPlane";
       assert checkPointOtherCutoffPlane.isWithin(intersectionPoint) : "intersection must be within checkPointOtherCutoffPlane";
-      assert intersectionBound1.isWithin(intersectionPoint) : "intersection must be within intersectionBound1";
-      assert intersectionBound2.isWithin(intersectionPoint) : "intersection must be within intersectionBound2";
-
-      // Figure out which of the above/below planes are inside vs. outside.  To do this,
-      // we look for the point that is within the bounds of the testPointPlane and travelPlane.  The two sides that intersected there are the inside
-      // borders.
-      final Plane travelAbovePlane = new Plane(travelPlane, true);
-      final Plane travelBelowPlane = new Plane(travelPlane, false);
-
-      final GeoPoint[] aboveAbove = travelAbovePlane.findIntersections(planetModel, testPointAbovePlane, intersectionBound1, intersectionBound2);
-      assert aboveAbove != null : "Above + above should not be coplanar";
-      final GeoPoint[] aboveBelow = travelAbovePlane.findIntersections(planetModel, testPointBelowPlane, intersectionBound1, intersectionBound2);
-      assert aboveBelow != null : "Above + below should not be coplanar";
-      final GeoPoint[] belowBelow = travelBelowPlane.findIntersections(planetModel, testPointBelowPlane, intersectionBound1, intersectionBound2);
-      assert belowBelow != null : "Below + below should not be coplanar";
-      final GeoPoint[] belowAbove = travelBelowPlane.findIntersections(planetModel, testPointAbovePlane, intersectionBound1, intersectionBound2);
-      assert belowAbove != null : "Below + above should not be coplanar";
+    }
+
+    protected void computeInsideOutside() {
+      if (!computedInsideOutside) {
+        // Convert travel plane to a sided plane
+        final Membership intersectionBound1 = new SidedPlane(testPoint, travelPlane, travelPlane.D);
+        // Convert testPoint plane to a sided plane
+        final Membership intersectionBound2 = new SidedPlane(thePointX, thePointY, thePointZ, testPointPlane, testPointPlane.D);
-      assert ((aboveAbove.length > 0)?1:0) + ((aboveBelow.length > 0)?1:0) + ((belowBelow.length > 0)?1:0) + ((belowAbove.length > 0)?1:0) == 1 : "Can be exactly one inside point, instead was: aa="+aboveAbove.length+" ab=" + aboveBelow.length+" bb="+ belowBelow.length+" ba=" + belowAbove.length;
-
-      final GeoPoint insideIntersection;
-      if (aboveAbove.length > 0) {
-        travelInsidePlane = travelAbovePlane;
-        testPointInsidePlane = testPointAbovePlane;
-        travelOutsidePlane = travelBelowPlane;
-        testPointOutsidePlane = testPointBelowPlane;
-        insideIntersection = aboveAbove[0];
-      } else if (aboveBelow.length > 0) {
-        travelInsidePlane = travelAbovePlane;
-        testPointInsidePlane = testPointBelowPlane;
-        travelOutsidePlane = travelBelowPlane;
-        testPointOutsidePlane = testPointAbovePlane;
-        insideIntersection = aboveBelow[0];
-      } else if (belowBelow.length > 0) {
-        travelInsidePlane = travelBelowPlane;
-        testPointInsidePlane = testPointBelowPlane;
-        travelOutsidePlane = travelAbovePlane;
-        testPointOutsidePlane = testPointAbovePlane;
-        insideIntersection = belowBelow[0];
-      } else {
-        travelInsidePlane = travelBelowPlane;
-        testPointInsidePlane = testPointAbovePlane;
-        travelOutsidePlane = travelAbovePlane;
-        testPointOutsidePlane = testPointBelowPlane;
-        insideIntersection = belowAbove[0];
-      }
+        assert intersectionBound1.isWithin(intersectionPoint) : "intersection must be within intersectionBound1";
+        assert intersectionBound2.isWithin(intersectionPoint) : "intersection must be within intersectionBound2";
+
+        // Figure out which of the above/below planes are inside vs. outside.  To do this,
+        // we look for the point that is within the bounds of the testPointPlane and travelPlane.  The two sides that intersected there are the inside
+        // borders.
+        final Plane travelAbovePlane = new Plane(travelPlane, true);
+        final Plane travelBelowPlane = new Plane(travelPlane, false);
+
+        final GeoPoint[] aboveAbove = travelAbovePlane.findIntersections(planetModel, testPointAbovePlane, intersectionBound1, intersectionBound2);
+        assert aboveAbove != null : "Above + above should not be coplanar";
+        final GeoPoint[] aboveBelow = travelAbovePlane.findIntersections(planetModel, testPointBelowPlane, intersectionBound1, intersectionBound2);
+        assert aboveBelow != null : "Above + below should not be coplanar";
+        final GeoPoint[] belowBelow = travelBelowPlane.findIntersections(planetModel, testPointBelowPlane, intersectionBound1, intersectionBound2);
+        assert belowBelow != null : "Below + below should not be coplanar";
+        final GeoPoint[] belowAbove = travelBelowPlane.findIntersections(planetModel, testPointAbovePlane, intersectionBound1, intersectionBound2);
+        assert belowAbove != null : "Below + above should not be coplanar";
+
+        assert ((aboveAbove.length > 0)?1:0) + ((aboveBelow.length > 0)?1:0) + ((belowBelow.length > 0)?1:0) + ((belowAbove.length > 0)?1:0) == 1 : "Can be exactly one inside point, instead was: aa="+aboveAbove.length+" ab=" + aboveBelow.length+" bb="+ belowBelow.length+" ba=" + belowAbove.length;
+
+        if (aboveAbove.length > 0) {
+          travelInsidePlane = travelAbovePlane;
+          testPointInsidePlane = testPointAbovePlane;
+          travelOutsidePlane = travelBelowPlane;
+          testPointOutsidePlane = testPointBelowPlane;
+        } else if (aboveBelow.length > 0) {
+          travelInsidePlane = travelAbovePlane;
+          testPointInsidePlane = testPointBelowPlane;
+          travelOutsidePlane = travelBelowPlane;
+          testPointOutsidePlane = testPointAbovePlane;
+        } else if (belowBelow.length > 0) {
+          travelInsidePlane = travelBelowPlane;
+          testPointInsidePlane = testPointBelowPlane;
+          travelOutsidePlane = travelAbovePlane;
+          testPointOutsidePlane = testPointAbovePlane;
+        } else {
+          travelInsidePlane = travelBelowPlane;
+          testPointInsidePlane = testPointAbovePlane;
+          travelOutsidePlane = travelAbovePlane;
+          testPointOutsidePlane = testPointBelowPlane;
+        }
+
+        insideTravelCutoffPlane = new SidedPlane(thePointX, thePointY, thePointZ, testPointInsidePlane, testPointInsidePlane.D);
+        insideTestPointCutoffPlane = new SidedPlane(testPoint, travelInsidePlane, travelInsidePlane.D);
+        computedInsideOutside = true;
+      }
-
-      insideTravelCutoffPlane = new SidedPlane(thePoint, testPointInsidePlane, testPointInsidePlane.D);
-      insideTestPointCutoffPlane = new SidedPlane(testPoint, travelInsidePlane, travelInsidePlane.D);
-
     }

     public void setSecondLeg() {
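The fields changed from final to mutable above support a compute-on-first-use scheme: many edges never reach the code paths that need the inside/outside planes, so the constructor no longer builds them eagerly. The guard pattern, sketched standalone (illustrative names; a stand-in replaces the expensive work):

    final class LazyPlanes {
      private boolean computed = false;
      private double insideD;  // stand-in for the expensive-to-build planes

      void ensureComputed() {
        if (!computed) {
          insideD = Math.sqrt(2.0);  // stand-in for the expensive computation
          computed = true;           // runs at most once per iterator
        }
      }
    }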
@@ -924,7 +983,7 @@ class GeoComplexPolygon extends GeoBasePolygon {
     public boolean matches(final Edge edge) {
       //System.err.println("Processing edge "+edge+", startpoint="+edge.startPoint+" endpoint="+edge.endPoint);
       // Early exit if the point is on the edge.
-      if (thePoint != null && edge.plane.evaluateIsZero(thePoint) && edge.startPlane.isWithin(thePoint) && edge.endPlane.isWithin(thePoint)) {
+      if (edge.plane.evaluateIsZero(thePointX, thePointY, thePointZ) && edge.startPlane.isWithin(thePointX, thePointY, thePointZ) && edge.endPlane.isWithin(thePointX, thePointY, thePointZ)) {
         //System.err.println(" Check point is on edge: isWithin = true");
         return false;
       }
@@ -982,29 +1041,11 @@ class GeoComplexPolygon extends GeoBasePolygon {

       // Plane crossing, either first leg or second leg

-      final Plane plane;
-      final Plane insidePlane;
-      final Plane outsidePlane;
-      final SidedPlane bound1;
-      final SidedPlane bound2;
-      if (isSecondLeg) {
-        plane = travelPlane;
-        insidePlane = travelInsidePlane;
-        outsidePlane = travelOutsidePlane;
-        bound1 = checkPointCutoffPlane;
-        bound2 = checkPointOtherCutoffPlane;
-      } else {
-        plane = testPointPlane;
-        insidePlane = testPointInsidePlane;
-        outsidePlane = testPointOutsidePlane;
-        bound1 = testPointCutoffPlane;
-        bound2 = testPointOtherCutoffPlane;
-      }
-
       if (crossingPoint.isNumericallyIdentical(edge.startPoint)) {
         //System.err.println(" Crossing point = edge.startPoint");
         // We have to figure out if this crossing should be counted.
+
+        computeInsideOutside();

         // Does the crossing for this edge go up, or down?  Or can't we tell?
         final GeoPoint[] insideTestPointPlaneIntersections = testPointInsidePlane.findIntersections(planetModel, edge.plane, edge.startPlane, edge.endPlane, insideTestPointCutoffPlane);
         final GeoPoint[] insideTravelPlaneIntersections = travelInsidePlane.findIntersections(planetModel, edge.plane, edge.startPlane, edge.endPlane, insideTravelCutoffPlane);
@@ -1050,7 +1091,12 @@ class GeoComplexPolygon extends GeoBasePolygon {
         // a decision whether to count or not based on that.

         // Compute the crossing points of this other edge.
-        final GeoPoint[] otherCrossingPoints = plane.findCrossings(planetModel, assessEdge.plane, bound1, bound2, assessEdge.startPlane, assessEdge.endPlane);
+        final GeoPoint[] otherCrossingPoints;
+        if (isSecondLeg) {
+          otherCrossingPoints = travelPlane.findCrossings(planetModel, assessEdge.plane, checkPointCutoffPlane, checkPointOtherCutoffPlane, assessEdge.startPlane, assessEdge.endPlane);
+        } else {
+          otherCrossingPoints = testPointPlane.findCrossings(planetModel, assessEdge.plane, testPointCutoffPlane, testPointOtherCutoffPlane, assessEdge.startPlane, assessEdge.endPlane);
+        }

         // Look for a matching endpoint.  If the other endpoint doesn't show up, it is either out of bounds (in which case the
         // transition won't be counted for that edge), or it is not a crossing for that edge (so, same conclusion).
@@ -1079,7 +1125,8 @@ class GeoComplexPolygon extends GeoBasePolygon {
       } else if (crossingPoint.isNumericallyIdentical(edge.endPoint)) {
         //System.err.println(" Crossing point = edge.endPoint");
         // Figure out if the crossing should be counted.
+
+        computeInsideOutside();

         // Does the crossing for this edge go up, or down?  Or can't we tell?
         final GeoPoint[] insideTestPointPlaneIntersections = testPointInsidePlane.findIntersections(planetModel, edge.plane, edge.startPlane, edge.endPlane, insideTestPointCutoffPlane);
         final GeoPoint[] insideTravelPlaneIntersections = travelInsidePlane.findIntersections(planetModel, edge.plane, edge.startPlane, edge.endPlane, insideTravelCutoffPlane);
@@ -121,7 +121,7 @@ public class GeoPoint extends Vector {
    * @param v is the second point.
    * @return the angle, in radians, between the two points.
    */
-  public double arcDistance(final GeoPoint v) {
+  public double arcDistance(final Vector v) {
     return Tools.safeAcos(dotProduct(v)/(magnitude() * v.magnitude()));
   }
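For reference, arcDistance computes the central angle between two position vectors from the dot-product identity cos(theta) = (a.b)/(|a||b|); Tools.safeAcos clamps the ratio so rounding error cannot push it outside [-1, 1]. A self-contained sketch (illustrative only, not code from this commit):

    // Central angle between two 3D position vectors, in radians.
    static double arcDistance(double ax, double ay, double az, double bx, double by, double bz) {
      final double dot = ax * bx + ay * by + az * bz;
      final double magA = Math.sqrt(ax * ax + ay * ay + az * az);
      final double magB = Math.sqrt(bx * bx + by * by + bz * bz);
      // Clamp to [-1, 1] before acos, as Tools.safeAcos does, to survive rounding error.
      final double cos = Math.max(-1.0, Math.min(1.0, dot / (magA * magB)));
      return Math.acos(cos);
    }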
@@ -49,6 +49,19 @@ public class Plane extends Vector {
     this.D = D;
   }

+  /**
+   * Construct a plane through two points and origin.
+   *
+   * @param A is the first point (origin based).
+   * @param BX is the second point X (origin based).
+   * @param BY is the second point Y (origin based).
+   * @param BZ is the second point Z (origin based).
+   */
+  public Plane(final Vector A, final double BX, final double BY, final double BZ) {
+    super(A, BX, BY, BZ);
+    D = 0.0;
+  }
+
   /**
    * Construct a plane through two points and origin.
    *
@@ -666,8 +679,10 @@ public class Plane extends Vector {
   protected GeoPoint[] findIntersections(final PlanetModel planetModel, final Plane q, final Membership[] bounds, final Membership[] moreBounds) {
     //System.err.println("Looking for intersection between plane "+this+" and plane "+q+" within bounds");
     // Unnormalized, unchecked...
-    final Vector lineVector = new Vector(y * q.z - z * q.y, z * q.x - x * q.z, x * q.y - y * q.x);
-    if (Math.abs(lineVector.x) < MINIMUM_RESOLUTION && Math.abs(lineVector.y) < MINIMUM_RESOLUTION && Math.abs(lineVector.z) < MINIMUM_RESOLUTION) {
+    final double lineVectorX = y * q.z - z * q.y;
+    final double lineVectorY = z * q.x - x * q.z;
+    final double lineVectorZ = x * q.y - y * q.x;
+    if (Math.abs(lineVectorX) < MINIMUM_RESOLUTION && Math.abs(lineVectorY) < MINIMUM_RESOLUTION && Math.abs(lineVectorZ) < MINIMUM_RESOLUTION) {
       // Degenerate case: parallel planes
       //System.err.println(" planes are parallel - no intersection");
       return NO_POINTS;

@@ -738,10 +753,10 @@ public class Plane extends Vector {
     // A^2 t^2 / a^2 + 2AA0t / a^2 + A0^2 / a^2 + B^2 t^2 / b^2 + 2BB0t / b^2 + B0^2 / b^2 + C^2 t^2 / c^2 + 2CC0t / c^2 + C0^2 / c^2 - 1,0 = 0.0
     // [A^2 / a^2 + B^2 / b^2 + C^2 / c^2] t^2 + [2AA0 / a^2 + 2BB0 / b^2 + 2CC0 / c^2] t + [A0^2 / a^2 + B0^2 / b^2 + C0^2 / c^2 - 1,0] = 0.0
     // Use the quadratic formula to determine t values and candidate point(s)
-    final double A = lineVector.x * lineVector.x * planetModel.inverseAbSquared +
-      lineVector.y * lineVector.y * planetModel.inverseAbSquared +
-      lineVector.z * lineVector.z * planetModel.inverseCSquared;
-    final double B = 2.0 * (lineVector.x * x0 * planetModel.inverseAbSquared + lineVector.y * y0 * planetModel.inverseAbSquared + lineVector.z * z0 * planetModel.inverseCSquared);
+    final double A = lineVectorX * lineVectorX * planetModel.inverseAbSquared +
+      lineVectorY * lineVectorY * planetModel.inverseAbSquared +
+      lineVectorZ * lineVectorZ * planetModel.inverseCSquared;
+    final double B = 2.0 * (lineVectorX * x0 * planetModel.inverseAbSquared + lineVectorY * y0 * planetModel.inverseAbSquared + lineVectorZ * z0 * planetModel.inverseCSquared);
     final double C = x0 * x0 * planetModel.inverseAbSquared + y0 * y0 * planetModel.inverseAbSquared + z0 * z0 * planetModel.inverseCSquared - 1.0;

     final double BsquaredMinus = B * B - 4.0 * A * C;
@@ -750,12 +765,21 @@ public class Plane extends Vector {
       final double inverse2A = 1.0 / (2.0 * A);
       // One solution only
       final double t = -B * inverse2A;
-      GeoPoint point = new GeoPoint(lineVector.x * t + x0, lineVector.y * t + y0, lineVector.z * t + z0);
-      //System.err.println("  point: "+point);
-      //verifyPoint(planetModel, point, q);
-      if (point.isWithin(bounds, moreBounds))
-        return new GeoPoint[]{point};
-      return NO_POINTS;
+      // Maybe we can save ourselves the cost of construction of a point?
+      final double pointX = lineVectorX * t + x0;
+      final double pointY = lineVectorY * t + y0;
+      final double pointZ = lineVectorZ * t + z0;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(pointX, pointY, pointZ)) {
+          return NO_POINTS;
+        }
+      }
+      for (final Membership bound : moreBounds) {
+        if (!bound.isWithin(pointX, pointY, pointZ)) {
+          return NO_POINTS;
+        }
+      }
+      return new GeoPoint[]{new GeoPoint(pointX, pointY, pointZ)};
     } else if (BsquaredMinus > 0.0) {
       //System.err.println(" Two points of intersection");
       final double inverse2A = 1.0 / (2.0 * A);
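The branching above and below turns on the discriminant B^2 - 4AC of the line-versus-ellipsoid substitution: near zero means the line is tangent (one point), positive means two crossings, negative means a miss. A sketch of the root computation (illustrative names, not code from this commit):

    // Solve A t^2 + B t + C = 0 for the parameter t along the intersection line.
    static double[] intersectionParams(double A, double B, double C, double epsilon) {
      final double disc = B * B - 4.0 * A * C;
      if (Math.abs(disc) < epsilon) {
        return new double[]{-B / (2.0 * A)};  // tangent: one solution
      } else if (disc > 0.0) {
        final double s = Math.sqrt(disc);
        return new double[]{(-B + s) / (2.0 * A), (-B - s) / (2.0 * A)};  // two crossings
      }
      return new double[0];  // line misses the ellipsoid entirely
    }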
@@ -763,18 +787,53 @@ public class Plane extends Vector {
       final double sqrtTerm = Math.sqrt(BsquaredMinus);
       final double t1 = (-B + sqrtTerm) * inverse2A;
       final double t2 = (-B - sqrtTerm) * inverse2A;
-      GeoPoint point1 = new GeoPoint(lineVector.x * t1 + x0, lineVector.y * t1 + y0, lineVector.z * t1 + z0);
-      GeoPoint point2 = new GeoPoint(lineVector.x * t2 + x0, lineVector.y * t2 + y0, lineVector.z * t2 + z0);
-      //verifyPoint(planetModel, point1, q);
-      //verifyPoint(planetModel, point2, q);
-      //System.err.println("  "+point1+" and "+point2);
-      if (point1.isWithin(bounds, moreBounds)) {
-        if (point2.isWithin(bounds, moreBounds))
-          return new GeoPoint[]{point1, point2};
-        return new GeoPoint[]{point1};
+      // Up to two points being returned.  Do what we can to save on object creation though.
+      final double point1X = lineVectorX * t1 + x0;
+      final double point1Y = lineVectorY * t1 + y0;
+      final double point1Z = lineVectorZ * t1 + z0;
+      final double point2X = lineVectorX * t2 + x0;
+      final double point2Y = lineVectorY * t2 + y0;
+      final double point2Z = lineVectorZ * t2 + z0;
+      boolean point1Valid = true;
+      boolean point2Valid = true;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point1X, point1Y, point1Z)) {
+          point1Valid = false;
+          break;
+        }
+      }
+      if (point1Valid) {
+        for (final Membership bound : moreBounds) {
+          if (!bound.isWithin(point1X, point1Y, point1Z)) {
+            point1Valid = false;
+            break;
+          }
+        }
+      }
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point2X, point2Y, point2Z)) {
+          point2Valid = false;
+          break;
+        }
+      }
+      if (point2Valid) {
+        for (final Membership bound : moreBounds) {
+          if (!bound.isWithin(point2X, point2Y, point2Z)) {
+            point2Valid = false;
+            break;
+          }
+        }
+      }
+
+      if (point1Valid && point2Valid) {
+        return new GeoPoint[]{new GeoPoint(point1X, point1Y, point1Z), new GeoPoint(point2X, point2Y, point2Z)};
+      }
+      if (point1Valid) {
+        return new GeoPoint[]{new GeoPoint(point1X, point1Y, point1Z)};
+      }
+      if (point2Valid) {
+        return new GeoPoint[]{new GeoPoint(point2X, point2Y, point2Z)};
       }
-      if (point2.isWithin(bounds, moreBounds))
-        return new GeoPoint[]{point2};
       return NO_POINTS;
     } else {
       //System.err.println("  no solutions - no intersection");
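Both intersection routines now follow the same deferred-construction pattern: candidate coordinates are computed and screened against every bound as raw doubles, and GeoPoint objects are built only for survivors. A sketch of the pattern (illustrative helper, not code from this commit):

    // Validate raw coordinates first; allocate only if the point survives.
    static GeoPoint makeIfWithin(double x, double y, double z, Membership[] bounds) {
      for (final Membership bound : bounds) {
        if (!bound.isWithin(x, y, z)) {
          return null;  // rejected without ever allocating a GeoPoint
        }
      }
      return new GeoPoint(x, y, z);
    }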
@@ -797,8 +856,10 @@ public class Plane extends Vector {
     // This code in this method is very similar to findIntersections(), but eliminates the cases where
     // crossings are detected.
     // Unnormalized, unchecked...
-    final Vector lineVector = new Vector(y * q.z - z * q.y, z * q.x - x * q.z, x * q.y - y * q.x);
-    if (Math.abs(lineVector.x) < MINIMUM_RESOLUTION && Math.abs(lineVector.y) < MINIMUM_RESOLUTION && Math.abs(lineVector.z) < MINIMUM_RESOLUTION) {
+    final double lineVectorX = y * q.z - z * q.y;
+    final double lineVectorY = z * q.x - x * q.z;
+    final double lineVectorZ = x * q.y - y * q.x;
+    if (Math.abs(lineVectorX) < MINIMUM_RESOLUTION && Math.abs(lineVectorY) < MINIMUM_RESOLUTION && Math.abs(lineVectorZ) < MINIMUM_RESOLUTION) {
      // Degenerate case: parallel planes
      return NO_POINTS;
    }

@@ -865,10 +926,10 @@ public class Plane extends Vector {
     // A^2 t^2 / a^2 + 2AA0t / a^2 + A0^2 / a^2 + B^2 t^2 / b^2 + 2BB0t / b^2 + B0^2 / b^2 + C^2 t^2 / c^2 + 2CC0t / c^2 + C0^2 / c^2 - 1,0 = 0.0
     // [A^2 / a^2 + B^2 / b^2 + C^2 / c^2] t^2 + [2AA0 / a^2 + 2BB0 / b^2 + 2CC0 / c^2] t + [A0^2 / a^2 + B0^2 / b^2 + C0^2 / c^2 - 1,0] = 0.0
     // Use the quadratic formula to determine t values and candidate point(s)
-    final double A = lineVector.x * lineVector.x * planetModel.inverseAbSquared +
-      lineVector.y * lineVector.y * planetModel.inverseAbSquared +
-      lineVector.z * lineVector.z * planetModel.inverseCSquared;
-    final double B = 2.0 * (lineVector.x * x0 * planetModel.inverseAbSquared + lineVector.y * y0 * planetModel.inverseAbSquared + lineVector.z * z0 * planetModel.inverseCSquared);
+    final double A = lineVectorX * lineVectorX * planetModel.inverseAbSquared +
+      lineVectorY * lineVectorY * planetModel.inverseAbSquared +
+      lineVectorZ * lineVectorZ * planetModel.inverseCSquared;
+    final double B = 2.0 * (lineVectorX * x0 * planetModel.inverseAbSquared + lineVectorY * y0 * planetModel.inverseAbSquared + lineVectorZ * z0 * planetModel.inverseCSquared);
     final double C = x0 * x0 * planetModel.inverseAbSquared + y0 * y0 * planetModel.inverseAbSquared + z0 * z0 * planetModel.inverseCSquared - 1.0;

     final double BsquaredMinus = B * B - 4.0 * A * C;
@@ -881,18 +942,53 @@ public class Plane extends Vector {
       final double sqrtTerm = Math.sqrt(BsquaredMinus);
       final double t1 = (-B + sqrtTerm) * inverse2A;
       final double t2 = (-B - sqrtTerm) * inverse2A;
-      GeoPoint point1 = new GeoPoint(lineVector.x * t1 + x0, lineVector.y * t1 + y0, lineVector.z * t1 + z0);
-      GeoPoint point2 = new GeoPoint(lineVector.x * t2 + x0, lineVector.y * t2 + y0, lineVector.z * t2 + z0);
-      //verifyPoint(planetModel, point1, q);
-      //verifyPoint(planetModel, point2, q);
-      //System.err.println("  Considering points "+point1+" and "+point2);
-      if (point1.isWithin(bounds, moreBounds)) {
-        if (point2.isWithin(bounds, moreBounds))
-          return new GeoPoint[]{point1, point2};
-        return new GeoPoint[]{point1};
+      // Up to two points being returned.  Do what we can to save on object creation though.
+      final double point1X = lineVectorX * t1 + x0;
+      final double point1Y = lineVectorY * t1 + y0;
+      final double point1Z = lineVectorZ * t1 + z0;
+      final double point2X = lineVectorX * t2 + x0;
+      final double point2Y = lineVectorY * t2 + y0;
+      final double point2Z = lineVectorZ * t2 + z0;
+      boolean point1Valid = true;
+      boolean point2Valid = true;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point1X, point1Y, point1Z)) {
+          point1Valid = false;
+          break;
+        }
+      }
+      if (point1Valid) {
+        for (final Membership bound : moreBounds) {
+          if (!bound.isWithin(point1X, point1Y, point1Z)) {
+            point1Valid = false;
+            break;
+          }
+        }
+      }
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point2X, point2Y, point2Z)) {
+          point2Valid = false;
+          break;
+        }
+      }
+      if (point2Valid) {
+        for (final Membership bound : moreBounds) {
+          if (!bound.isWithin(point2X, point2Y, point2Z)) {
+            point2Valid = false;
+            break;
+          }
+        }
+      }
+
+      if (point1Valid && point2Valid) {
+        return new GeoPoint[]{new GeoPoint(point1X, point1Y, point1Z), new GeoPoint(point2X, point2Y, point2Z)};
+      }
+      if (point1Valid) {
+        return new GeoPoint[]{new GeoPoint(point1X, point1Y, point1Z)};
+      }
+      if (point2Valid) {
+        return new GeoPoint[]{new GeoPoint(point2X, point2Y, point2Z)};
       }
-      if (point2.isWithin(bounds, moreBounds))
-        return new GeoPoint[]{point2};
       return NO_POINTS;
     } else {
       // No solutions.
@@ -1667,7 +1763,159 @@ public class Plane extends Vector {
       //System.err.println(" no notable points inside found; no intersection");
       return false;
     }
-    return findIntersections(planetModel, q, bounds, moreBounds).length > 0;
+
+    // Save on allocations; do inline instead of calling findIntersections
+    //System.err.println("Looking for intersection between plane "+this+" and plane "+q+" within bounds");
+    // Unnormalized, unchecked...
+    final double lineVectorX = y * q.z - z * q.y;
+    final double lineVectorY = z * q.x - x * q.z;
+    final double lineVectorZ = x * q.y - y * q.x;
+
+    if (Math.abs(lineVectorX) < MINIMUM_RESOLUTION && Math.abs(lineVectorY) < MINIMUM_RESOLUTION && Math.abs(lineVectorZ) < MINIMUM_RESOLUTION) {
+      // Degenerate case: parallel planes
+      //System.err.println(" planes are parallel - no intersection");
+      return false;
+    }
+
+    // The line will have the equation: A t + A0 = x, B t + B0 = y, C t + C0 = z.
+    // We have A, B, and C.  In order to come up with A0, B0, and C0, we need to find a point that is on both planes.
+    // To do this, we find the largest vector value (either x, y, or z), and look for a point that solves both plane equations
+    // simultaneous.  For example, let's say that the vector is (0.5,0.5,1), and the two plane equations are:
+    // 0.7 x + 0.3 y + 0.1 z + 0.0 = 0
+    // and
+    // 0.9 x - 0.1 y + 0.2 z + 4.0 = 0
+    // Then we'd pick z = 0, so the equations to solve for x and y would be:
+    // 0.7 x + 0.3y = 0.0
+    // 0.9 x - 0.1y = -4.0
+    // ... which can readily be solved using standard linear algebra.  Generally:
+    // Q0 x + R0 y = S0
+    // Q1 x + R1 y = S1
+    // ... can be solved by Cramer's rule:
+    // x = det(S0 R0 / S1 R1) / det(Q0 R0 / Q1 R1)
+    // y = det(Q0 S0 / Q1 S1) / det(Q0 R0 / Q1 R1)
+    // ... where det( a b / c d ) = ad - bc, so:
+    // x = (S0 * R1 - R0 * S1) / (Q0 * R1 - R0 * Q1)
+    // y = (Q0 * S1 - S0 * Q1) / (Q0 * R1 - R0 * Q1)
+    double x0;
+    double y0;
+    double z0;
+    // We try to maximize the determinant in the denominator
+    final double denomYZ = this.y * q.z - this.z * q.y;
+    final double denomXZ = this.x * q.z - this.z * q.x;
+    final double denomXY = this.x * q.y - this.y * q.x;
+    if (Math.abs(denomYZ) >= Math.abs(denomXZ) && Math.abs(denomYZ) >= Math.abs(denomXY)) {
+      // X is the biggest, so our point will have x0 = 0.0
+      if (Math.abs(denomYZ) < MINIMUM_RESOLUTION_SQUARED) {
+        //System.err.println(" Denominator is zero: no intersection");
+        return false;
+      }
+      final double denom = 1.0 / denomYZ;
+      x0 = 0.0;
+      y0 = (-this.D * q.z - this.z * -q.D) * denom;
+      z0 = (this.y * -q.D + this.D * q.y) * denom;
+    } else if (Math.abs(denomXZ) >= Math.abs(denomXY) && Math.abs(denomXZ) >= Math.abs(denomYZ)) {
+      // Y is the biggest, so y0 = 0.0
+      if (Math.abs(denomXZ) < MINIMUM_RESOLUTION_SQUARED) {
+        //System.err.println(" Denominator is zero: no intersection");
+        return false;
+      }
+      final double denom = 1.0 / denomXZ;
+      x0 = (-this.D * q.z - this.z * -q.D) * denom;
+      y0 = 0.0;
+      z0 = (this.x * -q.D + this.D * q.x) * denom;
+    } else {
+      // Z is the biggest, so Z0 = 0.0
+      if (Math.abs(denomXY) < MINIMUM_RESOLUTION_SQUARED) {
+        //System.err.println(" Denominator is zero: no intersection");
+        return false;
+      }
+      final double denom = 1.0 / denomXY;
+      x0 = (-this.D * q.y - this.y * -q.D) * denom;
+      y0 = (this.x * -q.D + this.D * q.x) * denom;
+      z0 = 0.0;
+    }
+
+    // Once an intersecting line is determined, the next step is to intersect that line with the ellipsoid, which
+    // will yield zero, one, or two points.
+    // The ellipsoid equation: 1,0 = x^2/a^2 + y^2/b^2 + z^2/c^2
+    // 1.0 = (At+A0)^2/a^2 + (Bt+B0)^2/b^2 + (Ct+C0)^2/c^2
+    // A^2 t^2 / a^2 + 2AA0t / a^2 + A0^2 / a^2 + B^2 t^2 / b^2 + 2BB0t / b^2 + B0^2 / b^2 + C^2 t^2 / c^2 + 2CC0t / c^2 + C0^2 / c^2 - 1,0 = 0.0
+    // [A^2 / a^2 + B^2 / b^2 + C^2 / c^2] t^2 + [2AA0 / a^2 + 2BB0 / b^2 + 2CC0 / c^2] t + [A0^2 / a^2 + B0^2 / b^2 + C0^2 / c^2 - 1,0] = 0.0
+    // Use the quadratic formula to determine t values and candidate point(s)
+    final double A = lineVectorX * lineVectorX * planetModel.inverseAbSquared +
+      lineVectorY * lineVectorY * planetModel.inverseAbSquared +
+      lineVectorZ * lineVectorZ * planetModel.inverseCSquared;
+    final double B = 2.0 * (lineVectorX * x0 * planetModel.inverseAbSquared + lineVectorY * y0 * planetModel.inverseAbSquared + lineVectorZ * z0 * planetModel.inverseCSquared);
+    final double C = x0 * x0 * planetModel.inverseAbSquared + y0 * y0 * planetModel.inverseAbSquared + z0 * z0 * planetModel.inverseCSquared - 1.0;
+
+    final double BsquaredMinus = B * B - 4.0 * A * C;
+    if (Math.abs(BsquaredMinus) < MINIMUM_RESOLUTION_SQUARED) {
+      //System.err.println(" One point of intersection");
+      final double inverse2A = 1.0 / (2.0 * A);
+      // One solution only
+      final double t = -B * inverse2A;
+      // Maybe we can save ourselves the cost of construction of a point?
+      final double pointX = lineVectorX * t + x0;
+      final double pointY = lineVectorY * t + y0;
+      final double pointZ = lineVectorZ * t + z0;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(pointX, pointY, pointZ)) {
+          return false;
+        }
+      }
+      for (final Membership bound : moreBounds) {
+        if (!bound.isWithin(pointX, pointY, pointZ)) {
+          return false;
+        }
+      }
+      return true;
+    } else if (BsquaredMinus > 0.0) {
+      //System.err.println(" Two points of intersection");
+      final double inverse2A = 1.0 / (2.0 * A);
+      // Two solutions
+      final double sqrtTerm = Math.sqrt(BsquaredMinus);
+      final double t1 = (-B + sqrtTerm) * inverse2A;
+      final double t2 = (-B - sqrtTerm) * inverse2A;
+      // Up to two points being returned.  Do what we can to save on object creation though.
+      final double point1X = lineVectorX * t1 + x0;
+      final double point1Y = lineVectorY * t1 + y0;
+      final double point1Z = lineVectorZ * t1 + z0;
+      boolean point1Valid = true;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point1X, point1Y, point1Z)) {
+          point1Valid = false;
+          break;
+        }
+      }
+      if (point1Valid) {
+        for (final Membership bound : moreBounds) {
+          if (!bound.isWithin(point1X, point1Y, point1Z)) {
+            point1Valid = false;
+            break;
+          }
+        }
+      }
+      if (point1Valid) {
+        return true;
+      }
+      final double point2X = lineVectorX * t2 + x0;
+      final double point2Y = lineVectorY * t2 + y0;
+      final double point2Z = lineVectorZ * t2 + z0;
+      for (final Membership bound : bounds) {
+        if (!bound.isWithin(point2X, point2Y, point2Z)) {
+          return false;
+        }
+      }
+      for (final Membership bound : moreBounds) {
+        if (!bound.isWithin(point2X, point2Y, point2Z)) {
+          return false;
+        }
+      }
+      return true;
+    } else {
+      //System.err.println(" no solutions - no intersection");
+      return false;
+    }
   }

   /**
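The long comment block in the hunk above reduces plane-plane intersection to a 2x2 linear system solved by Cramer's rule, after zeroing the coordinate whose determinant is largest for numerical stability. The 2x2 solve itself, sketched standalone (illustrative names and threshold, not code from this commit):

    // Solve Q0 x + R0 y = S0 and Q1 x + R1 y = S1 by Cramer's rule.
    // Returns null when the determinant vanishes (no unique solution).
    static double[] solve2x2(double q0, double r0, double s0, double q1, double r1, double s1) {
      final double det = q0 * r1 - r0 * q1;
      if (Math.abs(det) < 1e-12) {  // threshold is illustrative
        return null;
      }
      return new double[]{(s0 * r1 - r0 * s1) / det, (q0 * s1 - s0 * q1) / det};
    }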
@@ -242,7 +242,7 @@ public class PlanetModel {
    * @param pt1 is the first point.
    * @param pt2 is the second point.
    * @return the adjusted angle, when multiplied by the mean earth radius, yields a surface distance.  This will differ
-   * from GeoPoint.arcDistance() only when the planet model is not a sphere. @see {@link GeoPoint#arcDistance(GeoPoint)}
+   * from GeoPoint.arcDistance() only when the planet model is not a sphere. @see {@link GeoPoint#arcDistance(Vector)}
    */
   public double surfaceDistance(final GeoPoint pt1, final GeoPoint pt2) {
     final double L = pt2.getLongitude() - pt1.getLongitude();
@@ -36,6 +36,23 @@ public class SidedPlane extends Plane implements Membership {
     this.sigNum = -sidedPlane.sigNum;
   }

+  /**
+   * Construct a sided plane from a pair of vectors describing points, and including
+   * origin, plus a point p which describes the side.
+   *
+   * @param pX point X to evaluate
+   * @param pY point Y to evaluate
+   * @param pZ point Z to evaluate
+   * @param A is the first in-plane point
+   * @param B is the second in-plane point
+   */
+  public SidedPlane(final double pX, final double pY, final double pZ, final Vector A, final Vector B) {
+    super(A, B);
+    sigNum = Math.signum(evaluate(pX, pY, pZ));
+    if (sigNum == 0.0)
+      throw new IllegalArgumentException("Cannot determine sidedness because check point is on plane.");
+  }
+
   /**
    * Construct a sided plane from a pair of vectors describing points, and including
    * origin, plus a point p which describes the side.

@@ -51,6 +68,23 @@ public class SidedPlane extends Plane implements Membership {
       throw new IllegalArgumentException("Cannot determine sidedness because check point is on plane.");
   }

+  /**
+   * Construct a sided plane from a pair of vectors describing points, and including
+   * origin, plus a point p which describes the side.
+   *
+   * @param p point to evaluate
+   * @param A is the first in-plane point
+   * @param BX is the X value of the second in-plane point
+   * @param BY is the Y value of the second in-plane point
+   * @param BZ is the Z value of the second in-plane point
+   */
+  public SidedPlane(final Vector p, final Vector A, final double BX, final double BY, final double BZ) {
+    super(A, BX, BY, BZ);
+    sigNum = Math.signum(evaluate(p));
+    if (sigNum == 0.0)
+      throw new IllegalArgumentException("Cannot determine sidedness because check point is on plane.");
+  }
+
   /**
    * Construct a sided plane from a pair of vectors describing points, and including
    * origin, plus a point p which describes the side.
@@ -56,6 +56,34 @@ public class Vector {
     this.z = z;
   }

+  /**
+   * Construct a vector that is perpendicular to
+   * two other (non-zero) vectors.  If the vectors are parallel,
+   * IllegalArgumentException will be thrown.
+   * Produces a normalized final vector.
+   *
+   * @param A is the first vector
+   * @param BX is the X value of the second
+   * @param BY is the Y value of the second
+   * @param BZ is the Z value of the second
+   */
+  public Vector(final Vector A, final double BX, final double BY, final double BZ) {
+    // x = u2v3 - u3v2
+    // y = u3v1 - u1v3
+    // z = u1v2 - u2v1
+    final double thisX = A.y * BZ - A.z * BY;
+    final double thisY = A.z * BX - A.x * BZ;
+    final double thisZ = A.x * BY - A.y * BX;
+    final double magnitude = magnitude(thisX, thisY, thisZ);
+    if (Math.abs(magnitude) < MINIMUM_RESOLUTION) {
+      throw new IllegalArgumentException("Degenerate/parallel vector constructed");
+    }
+    final double inverseMagnitude = 1.0 / magnitude;
+    this.x = thisX * inverseMagnitude;
+    this.y = thisY * inverseMagnitude;
+    this.z = thisZ * inverseMagnitude;
+  }
+
   /**
    * Construct a vector that is perpendicular to
    * two other (non-zero) vectors.  If the vectors are parallel,
@@ -133,16 +161,16 @@ public class Vector {
    * @param moreBounds is the second part of the set of planes.
    * @return true if the point is within the bounds.
    */
-  public boolean isWithin(final Membership[] bounds, final Membership[] moreBounds) {
+  public boolean isWithin(final Membership[] bounds, final Membership... moreBounds) {
     // Return true if the point described is within all provided bounds
     //System.err.println("  checking if "+this+" is within bounds");
-    for (Membership bound : bounds) {
+    for (final Membership bound : bounds) {
       if (bound != null && !bound.isWithin(this)) {
         //System.err.println("    NOT within "+bound);
         return false;
       }
     }
-    for (Membership bound : moreBounds) {
+    for (final Membership bound : moreBounds) {
       if (bound != null && !bound.isWithin(this)) {
         //System.err.println("    NOT within "+bound);
         return false;
@@ -328,6 +356,20 @@ public class Vector {
     return magnitude(x,y,z);
   }

+  /**
+   * Compute whether two vectors are numerically identical.
+   * @param otherX is the other vector X.
+   * @param otherY is the other vector Y.
+   * @param otherZ is the other vector Z.
+   * @return true if they are numerically identical.
+   */
+  public boolean isNumericallyIdentical(final double otherX, final double otherY, final double otherZ) {
+    final double thisX = y * otherZ - z * otherY;
+    final double thisY = z * otherX - x * otherZ;
+    final double thisZ = x * otherY - y * otherX;
+    return thisX * thisX + thisY * thisY + thisZ * thisZ < MINIMUM_RESOLUTION_SQUARED;
+  }
+
   /**
    * Compute whether two vectors are numerically identical.
    * @param other is the other vector.
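An implementation detail worth noting: rather than comparing coordinates componentwise, this overload tests that the cross product of the two vectors has near-zero squared magnitude, i.e. that they are parallel, which for the roughly unit-length position vectors used throughout Geo3D amounts to an identity test (an exactly antipodal pair would also pass, since its cross product vanishes too). A tiny sketch of the check (illustrative names and threshold, not code from this commit):

    // Parallelism test via cross-product magnitude: |a x b|^2 ~ 0
    // when a and b point along the same (or exactly opposite) direction.
    static boolean nearlyParallel(double ax, double ay, double az, double bx, double by, double bz) {
      final double cx = ay * bz - az * by;
      final double cy = az * bx - ax * bz;
      final double cz = ax * by - ay * bx;
      return cx * cx + cy * cy + cz * cz < 1e-24;  // threshold is illustrative
    }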
@@ -124,6 +124,8 @@ New Features

 * SOLR-9049: RuleBasedAuthorizationPlugin supports regex in param values eg: "command" : "REGEX:(i?)create" (noble)

+* SOLR-8972: Add GraphHandler and GraphMLResponseWriter to support graph visualizations (Joel Bernstein)
+
 Bug Fixes
 ----------------------

@@ -179,6 +181,17 @@ Bug Fixes

 * SOLR-8792: ZooKeeper ACL support fixed. (Esther Quansah, Ishan Chattopadhyaya, Steve Rowe)

+* SOLR-9064: Adds an explanation of the incoming stream to an UpdateStream's explanation (Dennis Gove)
+
+* SOLR-9030: The 'downnode' overseer command can trip asserts in ZkStateWriter.
+  (Scott Blum, Mark Miller, shalin)
+
+* SOLR-9036: Solr slave is doing full replication (entire index) of index after master restart.
+  (Lior Sapir, Mark Miller, shalin)
+
+* SOLR-9058: Makes HashJoinStream and OuterHashJoinStream support different field names in the
+  incoming streams, eg. fieldA=fieldB. (Dennis Gove, Stephan Osthold)
+
 Optimizations
 ----------------------
 * SOLR-8722: Don't force a full ZkStateReader refresh on every Overseer operation.

@@ -241,7 +254,15 @@ Other Changes

 * SOLR-9047: zkcli should allow alternative locations for log4j configuration (Gregory Chanan)

-* SOLR-9053: Upgrade commons-fileupload to 1.3.1, fixing a potential vulnerability (Jeff Field, janhoy)
+* SOLR-9053: Upgrade commons-fileupload to 1.3.1, fixing a potential vulnerability (Jeff Field, Mike Drob via janhoy)
+
+* SOLR-9066: Make CountMetric return long instead of double (Kevin Risden)
+
+* SOLR-9065: Migrate SolrJ distributed tests to SolrCloudTestCase. (Alan Woodward)
+
+* SOLR-8184: Negative tests for JDBC Connection String (Susheel Kumar, Jason Gerlowski, Kevin Risden)
+
+* SOLR-8458: Add Streaming Expressions tests for parameter substitution (Joel Bernstein, Cao Manh Dat, Dennis Gove, Kevin Risden)

 ================== 6.0.0 ==================
|
||||
|
||||
|
|
|
@@ -233,8 +233,9 @@ public class Assign {
}
DocCollection coll = clusterState.getCollection(collectionName);
Integer maxShardsPerNode = coll.getInt(MAX_SHARDS_PER_NODE, 1);
for (String s : clusterState.getCollections()) {
DocCollection c = clusterState.getCollection(s);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection c = entry.getValue();
//identify suitable nodes by checking the no:of cores in each of them
for (Slice slice : c.getSlices()) {
Collection<Replica> replicas = slice.getReplicas();

@@ -242,7 +243,7 @@ public class Assign {
ReplicaCount count = nodeNameVsShardCount.get(replica.getNodeName());
if (count != null) {
count.totalNodes++; // Used to "weigh" whether this node should be used later.
if (s.equals(collectionName)) {
if (entry.getKey().equals(collectionName)) {
count.thisCollectionNodes++;
if (count.thisCollectionNodes >= maxShardsPerNode) nodeNameVsShardCount.remove(replica.getNodeName());
}
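Several hunks in this commit, starting with Assign above, repeat one migration: ClusterState.getCollections() returned only a Set<String> of names, forcing a second getCollection(name) lookup per collection, while getCollectionsMap() hands back one Map<String, DocCollection> snapshot. A hedged sketch of the before/after shape of that loop (the nested ClusterState and DocCollection types stand in for the real Solr classes):

import java.util.Map;

class CollectionScanSketch {
  interface DocCollection { }
  interface ClusterState {
    Map<String, DocCollection> getCollectionsMap(); // single snapshot of name -> state
  }

  static void scan(ClusterState clusterState) {
    // Before: for (String name : clusterState.getCollections()) {
    //           DocCollection c = clusterState.getCollection(name); // second lookup per name
    //         }
    // After: one map fetch, then plain entry iteration.
    for (Map.Entry<String, DocCollection> entry : clusterState.getCollectionsMap().entrySet()) {
      String name = entry.getKey();
      DocCollection c = entry.getValue();
      System.out.println("collection " + name + " -> " + c);
    }
  }
}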
@@ -162,10 +162,10 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {

liveNodes = clusterState.getLiveNodes();
lastClusterStateVersion = clusterState.getZkClusterStateVersion();
Set<String> collections = clusterState.getCollections();
for (final String collection : collections) {
log.debug("look at collection={}", collection);
DocCollection docCollection = clusterState.getCollection(collection);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
log.debug("look at collection={}", entry.getKey());
DocCollection docCollection = entry.getValue();
if (!docCollection.getAutoAddReplicas()) {
log.debug("Collection {} is not setup to use autoAddReplicas, skipping..", docCollection.getName());
continue;

@@ -174,7 +174,7 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {
log.debug("Skipping collection because it has no defined replicationFactor, name={}", docCollection.getName());
continue;
}
log.debug("Found collection, name={} replicationFactor={}", collection, docCollection.getReplicationFactor());
log.debug("Found collection, name={} replicationFactor={}", entry.getKey(), docCollection.getReplicationFactor());

Collection<Slice> slices = docCollection.getSlices();
for (Slice slice : slices) {

@@ -188,7 +188,7 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {

if (downReplicas.size() > 0 && goodReplicas < docCollection.getReplicationFactor()) {
// badReplicaMap.put(collection, badReplicas);
processBadReplicas(collection, downReplicas);
processBadReplicas(entry.getKey(), downReplicas);
} else if (goodReplicas > docCollection.getReplicationFactor()) {
log.debug("There are too many replicas");
}

@@ -313,10 +313,11 @@ public class OverseerAutoReplicaFailoverThread implements Runnable, Closeable {

ClusterState clusterState = zkStateReader.getClusterState();
if (clusterState != null) {
Set<String> collections = clusterState.getCollections();
for (String collection : collections) {
log.debug("look at collection {} as possible create candidate", collection);
DocCollection docCollection = clusterState.getCollection(collection);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
String collection = entry.getKey();
log.debug("look at collection {} as possible create candidate", collection);
DocCollection docCollection = entry.getValue();
// TODO - only operate on collections with sharedfs failover = true ??
Collection<Slice> slices = docCollection.getSlices();
for (Slice slice : slices) {
@@ -350,10 +350,10 @@ public class OverseerConfigSetMessageHandler implements OverseerMessageHandler {
throw new SolrException(ErrorCode.BAD_REQUEST, "ConfigSet does not exist to delete: " + configSetName);
}

for (String s : zkStateReader.getClusterState().getCollections()) {
if (configSetName.equals(zkStateReader.readConfigName(s)))
for (Map.Entry<String, DocCollection> entry : zkStateReader.getClusterState().getCollectionsMap().entrySet()) {
if (configSetName.equals(zkStateReader.readConfigName(entry.getKey())))
throw new SolrException(ErrorCode.BAD_REQUEST,
"Can not delete ConfigSet as it is currently being used by collection [" + s + "]");
"Can not delete ConfigSet as it is currently being used by collection [" + entry.getKey() + "]");
}

String propertyPath = ConfigSetProperties.DEFAULT_FILENAME;
@@ -689,14 +689,12 @@ public final class ZkController {
long now = System.nanoTime();
long timeout = now + TimeUnit.NANOSECONDS.convert(WAIT_DOWN_STATES_TIMEOUT_SECONDS, TimeUnit.SECONDS);
boolean foundStates = true;
ClusterState clusterState = zkStateReader.getClusterState();
Set<String> collections = clusterState.getCollections();


while (System.nanoTime() < timeout) {
clusterState = zkStateReader.getClusterState();
collections = clusterState.getCollections();
for (String collectionName : collections) {
DocCollection collection = clusterState.getCollection(collectionName);
ClusterState clusterState = zkStateReader.getClusterState();
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection collection = entry.getValue();
Collection<Slice> slices = collection.getSlices();
for (Slice slice : slices) {
Collection<Replica> replicas = slice.getReplicas();
@@ -49,14 +49,13 @@ public class NodeMutator {

log.info("DownNode state invoked for node: " + nodeName);

Set<String> collections = clusterState.getCollections();
for (String collection : collections) {
DocCollection docCollection = clusterState.getCollection(collection);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection docCollection = entry.getValue();
Map<String,Slice> slicesCopy = new LinkedHashMap<>(docCollection.getSlicesMap());

Set<Entry<String,Slice>> entries = slicesCopy.entrySet();
for (Entry<String,Slice> entry : entries) {
Slice slice = docCollection.getSlice(entry.getKey());
for (Entry<String,Slice> sliceEntry : slicesCopy.entrySet()) {
Slice slice = docCollection.getSlice(sliceEntry.getKey());
Map<String,Replica> newReplicas = new HashMap<String,Replica>();

Collection<Replica> replicas = slice.getReplicas();

@@ -77,7 +76,7 @@ public class NodeMutator {

}

zkWriteCommands.add(new ZkWriteCommand(collection, docCollection.copyWithSlices(slicesCopy)));
zkWriteCommands.add(new ZkWriteCommand(entry.getKey(), docCollection.copyWithSlices(slicesCopy)));
}

return zkWriteCommands;
@@ -229,7 +229,6 @@ public class ZkStateWriter {
byte[] data = Utils.toJSON(singletonMap(c.getName(), c));
if (reader.getZkClient().exists(path, true)) {
log.info("going to update_collection {} version: {}", path, c.getZNodeVersion());
assert c.getZNodeVersion() >= 0;
Stat stat = reader.getZkClient().setData(path, data, c.getZNodeVersion(), true);
DocCollection newCollection = new DocCollection(name, c.getSlicesMap(), c.getProperties(), c.getRouter(), stat.getVersion(), path);
clusterState = clusterState.copyWith(name, newCollection);

@@ -251,13 +250,9 @@ public class ZkStateWriter {
assert clusterState.getZkClusterStateVersion() >= 0;
byte[] data = Utils.toJSON(clusterState);
Stat stat = reader.getZkClient().setData(ZkStateReader.CLUSTER_STATE, data, clusterState.getZkClusterStateVersion(), true);
Set<String> collectionNames = clusterState.getCollections();
Map<String, DocCollection> collectionStates = new HashMap<>(collectionNames.size());
for (String c : collectionNames) {
collectionStates.put(c, clusterState.getCollection(c));
}
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
// use the reader's live nodes because our cluster state's live nodes may be stale
clusterState = new ClusterState(stat.getVersion(), reader.getClusterState().getLiveNodes(), collectionStates);
clusterState = new ClusterState(stat.getVersion(), reader.getClusterState().getLiveNodes(), collections);
isClusterStateModified = false;
}
lastUpdatedTime = System.nanoTime();
@@ -104,8 +104,9 @@ public class ReplicaAssigner {
validateTags(nodeVsTags);

if (clusterState != null) {
for (String s : clusterState.getCollections()) {
DocCollection coll = clusterState.getCollection(s);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection coll = entry.getValue();
for (Slice slice : coll.getSlices()) {
for (Replica replica : slice.getReplicas()) {
AtomicInteger count = nodeVsCores.get(replica.getNodeName());
@@ -2111,6 +2111,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
m.put("standard", m.get("xml"));
m.put(CommonParams.JSON, new JSONResponseWriter());
m.put("geojson", new GeoJSONResponseWriter());
m.put("graphml", new GraphMLResponseWriter());
m.put("python", new PythonResponseWriter());
m.put("php", new PHPResponseWriter());
m.put("phps", new PHPSerializedResponseWriter());
@@ -0,0 +1,282 @@
package org.apache.solr.handler;

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.graph.GatherNodesStream;
import org.apache.solr.client.solrj.io.graph.ShortestPathStream;
import org.apache.solr.client.solrj.io.graph.Traversal;
import org.apache.solr.client.solrj.io.ops.ConcatOperation;
import org.apache.solr.client.solrj.io.ops.DistinctOperation;
import org.apache.solr.client.solrj.io.ops.GroupOperation;
import org.apache.solr.client.solrj.io.ops.ReplaceOperation;
import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CloseHook;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.security.AuthorizationContext;
import org.apache.solr.security.PermissionNameProvider;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {

private StreamFactory streamFactory = new StreamFactory();
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private String coreName;

@Override
public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) {
return PermissionNameProvider.Name.READ_PERM;
}

public void inform(SolrCore core) {

/* The stream factory will always contain the zkUrl for the given collection
* Adds default streams with their corresponding function names. These
* defaults can be overridden or added to in the solrConfig in the stream
* RequestHandler def. Example config override
* <lst name="streamFunctions">
* <str name="group">org.apache.solr.client.solrj.io.stream.ReducerStream</str>
* <str name="count">org.apache.solr.client.solrj.io.stream.RecordCountStream</str>
* </lst>
* */

String defaultCollection = null;
String defaultZkhost = null;
CoreContainer coreContainer = core.getCoreDescriptor().getCoreContainer();
this.coreName = core.getName();

if(coreContainer.isZooKeeperAware()) {
defaultCollection = core.getCoreDescriptor().getCollectionName();
defaultZkhost = core.getCoreDescriptor().getCoreContainer().getZkController().getZkServerAddress();
streamFactory.withCollectionZkHost(defaultCollection, defaultZkhost);
streamFactory.withDefaultZkHost(defaultZkhost);
}

streamFactory
// streams
.withFunctionName("search", CloudSolrStream.class)
.withFunctionName("merge", MergeStream.class)
.withFunctionName("unique", UniqueStream.class)
.withFunctionName("top", RankStream.class)
.withFunctionName("group", GroupOperation.class)
.withFunctionName("reduce", ReducerStream.class)
.withFunctionName("parallel", ParallelStream.class)
.withFunctionName("rollup", RollupStream.class)
.withFunctionName("stats", StatsStream.class)
.withFunctionName("innerJoin", InnerJoinStream.class)
.withFunctionName("leftOuterJoin", LeftOuterJoinStream.class)
.withFunctionName("hashJoin", HashJoinStream.class)
.withFunctionName("outerHashJoin", OuterHashJoinStream.class)
.withFunctionName("facet", FacetStream.class)
.withFunctionName("update", UpdateStream.class)
.withFunctionName("jdbc", JDBCStream.class)
.withFunctionName("intersect", IntersectStream.class)
.withFunctionName("complement", ComplementStream.class)
.withFunctionName("daemon", DaemonStream.class)
.withFunctionName("topic", TopicStream.class)
.withFunctionName("shortestPath", ShortestPathStream.class)
.withFunctionName("gatherNodes", GatherNodesStream.class)
.withFunctionName("sort", SortStream.class)


// metrics
.withFunctionName("min", MinMetric.class)
.withFunctionName("max", MaxMetric.class)
.withFunctionName("avg", MeanMetric.class)
.withFunctionName("sum", SumMetric.class)
.withFunctionName("count", CountMetric.class)

// tuple manipulation operations
.withFunctionName("replace", ReplaceOperation.class)
.withFunctionName("concat", ConcatOperation.class)

// stream reduction operations
.withFunctionName("group", GroupOperation.class)
.withFunctionName("distinct", DistinctOperation.class);

// This pulls all the overrides and additions from the config
Object functionMappingsObj = initArgs.get("streamFunctions");
if(null != functionMappingsObj){
NamedList<?> functionMappings = (NamedList<?>)functionMappingsObj;
for(Entry<String,?> functionMapping : functionMappings){
Class<?> clazz = core.getResourceLoader().findClass((String)functionMapping.getValue(), Expressible.class);
streamFactory.withFunctionName(functionMapping.getKey(), clazz);
}
}
}

public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
SolrParams params = req.getParams();
params = adjustParams(params);
req.setParams(params);


TupleStream tupleStream = null;

try {
tupleStream = this.streamFactory.constructStream(params.get("expr"));
} catch (Exception e) {
//Catch exceptions that occur while the stream is being created. This will include streaming expression parse rules.
SolrException.log(logger, e);
Map requestContext = req.getContext();
requestContext.put("stream", new DummyErrorStream(e));
return;
}

StreamContext context = new StreamContext();
context.setSolrClientCache(StreamHandler.clientCache);
context.put("core", this.coreName);
Traversal traversal = new Traversal();
context.put("traversal", traversal);
tupleStream.setStreamContext(context);
Map requestContext = req.getContext();
requestContext.put("stream", new TimerStream(new ExceptionStream(tupleStream)));
requestContext.put("traversal", traversal);
}

public String getDescription() {
return "GraphHandler";
}

public String getSource() {
return null;
}


public static class DummyErrorStream extends TupleStream {
private Exception e;

public DummyErrorStream(Exception e) {
this.e = e;
}
public StreamComparator getStreamSort() {
return null;
}

public void close() {
}

public void open() {
}

public Exception getException() {
return this.e;
}

public void setStreamContext(StreamContext context) {
}

public List<TupleStream> children() {
return null;
}

@Override
public Explanation toExplanation(StreamFactory factory) throws IOException {
return null;
}

public Tuple read() {
String msg = e.getMessage();
Map m = new HashMap();
m.put("EOF", true);
m.put("EXCEPTION", msg);
return new Tuple(m);
}
}


private SolrParams adjustParams(SolrParams params) {
ModifiableSolrParams adjustedParams = new ModifiableSolrParams();
adjustedParams.add(params);
adjustedParams.add(CommonParams.OMIT_HEADER, "true");
return adjustedParams;
}

public static class TimerStream extends TupleStream {

private long begin;
private TupleStream tupleStream;

public TimerStream(TupleStream tupleStream) {
this.tupleStream = tupleStream;
}

public StreamComparator getStreamSort() {
return this.tupleStream.getStreamSort();
}

public void close() throws IOException {
this.tupleStream.close();
}

public void open() throws IOException {
this.begin = System.nanoTime();
this.tupleStream.open();
}

public void setStreamContext(StreamContext context) {
this.tupleStream.setStreamContext(context);
}

public List<TupleStream> children() {
return this.tupleStream.children();
}

@Override
public Explanation toExplanation(StreamFactory factory) throws IOException {
return null;
}

public Tuple read() throws IOException {
Tuple tuple = this.tupleStream.read();
if(tuple.EOF) {
long totalTime = (System.nanoTime() - begin) / 1000000;
tuple.fields.put("RESPONSE_TIME", totalTime);
}
return tuple;
}
}

}
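With the /graph handler registered as above, a request only needs an expr parameter; the handler wires in the Traversal and TimerStream shown earlier and the GraphML writer renders the result. A hypothetical smoke test follows: the collection name "emails" and the walk/gather fields are illustrative, and the gatherNodes syntax is assumed from the streaming-expression documentation rather than anything in this diff.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class GraphHandlerSmokeTest {
  public static void main(String[] args) throws Exception {
    // Illustrative expression: walk from a seed address and gather "to" nodes.
    String expr = "gatherNodes(emails, walk=\"john@example.com->from\", gather=\"to\")";
    String url = "http://localhost:8983/solr/emails/graph?expr="
        + URLEncoder.encode(expr, StandardCharsets.UTF_8.name());
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(new URL(url).openStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // GraphML, since wt=graphml is an invariant of /graph
      }
    }
  }
}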
@@ -565,7 +565,10 @@ public class IndexFetcher {
}
}

core.getUpdateHandler().getSolrCoreState().setLastReplicateIndexSuccess(successfulInstall);
if (core.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
// we only track replication success in SolrCloud mode
core.getUpdateHandler().getSolrCoreState().setLastReplicateIndexSuccess(successfulInstall);
}

filesToDownload = filesDownloaded = confFilesDownloaded = confFilesToDownload = tlogFilesToDownload = tlogFilesDownloaded = null;
markReplicationStop();
@@ -1515,8 +1515,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe
CloudSolrClient cloudSolrClient = this.context.getSolrClientCache().getCloudSolrClient(this.zkHost);
cloudSolrClient.connect();
ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
if (zkStateReader.getClusterState().getCollections().size() != 0) {
this.tables.addAll(zkStateReader.getClusterState().getCollections());
Map<String, DocCollection> collections = zkStateReader.getClusterState().getCollectionsMap();
if (collections.size() != 0) {
this.tables.addAll(collections.keySet());
}
Collections.sort(this.tables);
}
@@ -89,20 +89,22 @@ public class ClusterStatus {
byte[] bytes = Utils.toJSON(clusterState);
Map<String, Object> stateMap = (Map<String,Object>) Utils.fromJSON(bytes);

Set<String> collections;
String routeKey = message.getStr(ShardParams._ROUTE_);
String shard = message.getStr(ZkStateReader.SHARD_ID_PROP);

Map<String, DocCollection> collectionsMap = null;
if (collection == null) {
collections = new HashSet<>(clusterState.getCollections());
collectionsMap = clusterState.getCollectionsMap();
} else {
collections = Collections.singleton(collection);
collectionsMap = Collections.singletonMap(collection, clusterState.getCollectionOrNull(collection));
}

NamedList<Object> collectionProps = new SimpleOrderedMap<>();

for (String name : collections) {
for (Map.Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
Map<String, Object> collectionStatus;
DocCollection clusterStateCollection = clusterState.getCollectionOrNull(name);
String name = entry.getKey();
DocCollection clusterStateCollection = entry.getValue();
if (clusterStateCollection == null) {
if (collection != null) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + name + " not found");
@@ -679,11 +679,8 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
@Override
Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
NamedList<Object> results = new NamedList<>();
Set<String> collections = handler.coreContainer.getZkController().getZkStateReader().getClusterState().getCollections();
List<String> collectionList = new ArrayList<>();
for (String collection : collections) {
collectionList.add(collection);
}
Map<String, DocCollection> collections = handler.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap();
List<String> collectionList = new ArrayList<>(collections.keySet());
results.add("collections", collectionList);
SolrResponse response = new OverseerSolrResponse(results);
rsp.getValues().addAll(response.getResponse());
@@ -0,0 +1,167 @@
package org.apache.solr.response;

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.lang.invoke.MethodHandles;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;

import org.apache.solr.client.solrj.io.graph.Traversal;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.GraphHandler;
import org.apache.solr.request.SolrQueryRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class GraphMLResponseWriter implements QueryResponseWriter {

private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

public void init(NamedList args) {
/* NOOP */
}

public String getContentType(SolrQueryRequest req, SolrQueryResponse res) {
return "application/xml";
}

public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse res) throws IOException {

Exception e1 = res.getException();
if(e1 != null) {
e1.printStackTrace(new PrintWriter(writer));
return;
}

TupleStream stream = (TupleStream)req.getContext().get("stream");

if(stream instanceof GraphHandler.DummyErrorStream) {
GraphHandler.DummyErrorStream d = (GraphHandler.DummyErrorStream)stream;
Exception e = d.getException();
e.printStackTrace(new PrintWriter(writer));
return;
}


Traversal traversal = (Traversal)req.getContext().get("traversal");
PrintWriter printWriter = new PrintWriter(writer);

try {

stream.open();

Tuple tuple = null;

int edgeCount = 0;

printWriter.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
printWriter.println("<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" ");
printWriter.println("xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" ");
printWriter.print("xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns ");
printWriter.println("http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd\">");

printWriter.println("<graph id=\"G\" edgedefault=\"directed\">");

while (true) {
//Output the graph
tuple = stream.read();
if (tuple.EOF) {
break;
}

String id = tuple.getString("node");

if (traversal.isMultiCollection()) {
id = tuple.getString("collection") + "." + id;
}

writer.write("<node id=\""+replace(id)+"\"");

List<String> outfields = new ArrayList();
Iterator<String> keys = tuple.fields.keySet().iterator();
while(keys.hasNext()) {
String key = keys.next();
if(key.equals("node") || key.equals("ancestors") || key.equals("collection")) {
continue;
} else {
outfields.add(key);
}
}

if (outfields.size() > 0) {
printWriter.println(">");
for (String nodeAttribute : outfields) {
Object o = tuple.get(nodeAttribute);
if (o != null) {
printWriter.println("<data key=\""+nodeAttribute+"\">" + o.toString() + "</data>");
}
}
printWriter.println("</node>");
} else {
printWriter.println("/>");
}

List<String> ancestors = tuple.getStrings("ancestors");

if(ancestors != null) {
for (String ancestor : ancestors) {
++edgeCount;
writer.write("<edge id=\"" + edgeCount + "\" ");
writer.write(" source=\"" + replace(ancestor) + "\" ");
printWriter.println(" target=\"" + replace(id) + "\"/>");
}
}
}

writer.write("</graph></graphml>");
} finally {
stream.close();
}
}

private String replace(String s) {
if(s.indexOf(">") > -1) {
s = s.replace(">", "&gt;");
}

if(s.indexOf("<") > -1) {
s = s.replace("<", "&lt;");
}

if(s.indexOf("\"")> -1) {
s = s.replace("\"", "&quot;");
}

if(s.indexOf("'") > -1) {
s = s.replace("'", "&apos;");
}

if(s.indexOf("&") > -1) {
s = s.replace("&", "&amp;");
}

return s;
}
}
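One caution on the replace() helper above: because "&" is escaped last, the ampersands introduced by the earlier substitutions are themselves re-escaped, so ">" ends up as "&amp;gt;" rather than "&gt;". Escaping the ampersand first avoids the double encoding; a minimal corrected sketch:

public class XmlEscapeSketch {
  // Escape "&" before the other entities so their own ampersands survive.
  static String escape(String s) {
    s = s.replace("&", "&amp;");   // must run first
    s = s.replace("<", "&lt;");
    s = s.replace(">", "&gt;");
    s = s.replace("\"", "&quot;");
    s = s.replace("'", "&apos;");
    return s;
  }

  public static void main(String[] args) {
    System.out.println(escape("a > b & c")); // prints: a &gt; b &amp; c
  }
}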
@@ -815,15 +815,15 @@ public class HttpSolrCall {
private void getSlicesForCollections(ClusterState clusterState,
Collection<Slice> slices, boolean activeSlices) {
if (activeSlices) {
for (String collection : clusterState.getCollections()) {
final Collection<Slice> activeCollectionSlices = clusterState.getActiveSlices(collection);
for (Map.Entry<String, DocCollection> entry : clusterState.getCollectionsMap().entrySet()) {
final Collection<Slice> activeCollectionSlices = entry.getValue().getActiveSlices();
if (activeCollectionSlices != null) {
slices.addAll(activeCollectionSlices);
}
}
} else {
for (String collection : clusterState.getCollections()) {
final Collection<Slice> collectionSlices = clusterState.getSlices(collection);
for (Map.Entry<String, DocCollection> entry : clusterState.getCollectionsMap().entrySet()) {
final Collection<Slice> collectionSlices = entry.getValue().getSlices();
if (collectionSlices != null) {
slices.addAll(collectionSlices);
}
@@ -57,7 +57,6 @@ import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase;
import org.apache.solr.util.RTimer;
import org.apache.solr.util.RTimerTree;

import static org.apache.solr.common.params.CommonParams.PATH;

@@ -564,21 +563,18 @@ public class SolrRequestParsers
upload.setSizeMax( ((long) uploadLimitKB) * 1024L );

// Parse the request
List items = upload.parseRequest(req);
Iterator iter = items.iterator();
while (iter.hasNext()) {
FileItem item = (FileItem) iter.next();

// If it's a form field, put it in our parameter map
if (item.isFormField()) {
MultiMapSolrParams.addParam(
item.getFieldName().trim(),
item.getString(), params.getMap() );
}
// Add the stream
else {
streams.add( new FileItemContentStream( item ) );
}
List<FileItem> items = upload.parseRequest(req);
for (FileItem item : items) {
// If it's a form field, put it in our parameter map
if (item.isFormField()) {
MultiMapSolrParams.addParam(
item.getFieldName().trim(),
item.getString(), params.getMap() );
}
// Add the stream
else {
streams.add( new FileItemContentStream( item ) );
}
}
return params;
}
@@ -1883,7 +1883,7 @@ public class SolrCLI {
log.warn("Skipping safety checks, configuration directory "+configName+" will be deleted with impunity.");
} else {
// need to scan all Collections to see if any are using the config
Set<String> collections = zkStateReader.getClusterState().getCollections();
Set<String> collections = zkStateReader.getClusterState().getCollectionsMap().keySet();

// give a little note to the user if there are many collections in case it takes a while
if (collections.size() > 50)
@@ -84,6 +84,13 @@
"distrib": false
}
},
"/graph": {
"class": "solr.GraphHandler",
"invariants": {
"wt": "graphml",
"distrib": false
}
},
"/stream": {
"class": "solr.StreamHandler",
"invariants": {
@@ -112,6 +112,7 @@ public class MinimalSchemaTest extends SolrTestCaseJ4 {
handler.startsWith("/config") ||
handler.startsWith("/mlt") ||
handler.startsWith("/export") ||
handler.startsWith("/graph") ||
handler.startsWith("/sql") ||
handler.startsWith("/stream")
) {
@@ -66,21 +66,21 @@ public class ClusterStateTest extends SolrTestCaseJ4 {

assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
.getLiveNodes().size());
assertEquals("No collections found", 2, loadedClusterState.getCollections().size());
assertEquals("Poperties not copied properly", replica.getStr("prop1"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop1"));
assertEquals("Poperties not copied properly", replica.getStr("prop2"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop2"));
assertEquals("No collections found", 2, loadedClusterState.getCollectionsMap().size());
assertEquals("Properties not copied properly", replica.getStr("prop1"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop1"));
assertEquals("Properties not copied properly", replica.getStr("prop2"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop2"));

loadedClusterState = ClusterState.load(-1, new byte[0], liveNodes);

assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
.getLiveNodes().size());
assertEquals("Should not have collections", 0, loadedClusterState.getCollections().size());
assertEquals("Should not have collections", 0, loadedClusterState.getCollectionsMap().size());

loadedClusterState = ClusterState.load(-1, (byte[])null, liveNodes);

assertEquals("Provided liveNodes not used properly", 2, loadedClusterState
.getLiveNodes().size());
assertEquals("Should not have collections", 0, loadedClusterState.getCollections().size());
assertEquals("Should not have collections", 0, loadedClusterState.getCollectionsMap().size());
}

public static ZkStateReader getMockZkStateReader(final Set<String> collections) {
@@ -1088,7 +1088,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
}
} else {
throw new IllegalArgumentException("Could not find collection in :"
+ clusterState.getCollections());
+ clusterState.getCollectionsMap());
}
}
@@ -525,7 +525,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
int maxIterations = 100;
while (0 < maxIterations--) {
final ClusterState state = stateReader.getClusterState();
Set<String> availableCollections = state.getCollections();
Set<String> availableCollections = state.getCollectionsMap().keySet();
int availableCount = 0;
for(String requiredCollection: collections) {
if(availableCollections.contains(requiredCollection)) {

@@ -911,8 +911,9 @@ public class OverseerTest extends SolrTestCaseJ4 {
ClusterState state = reader.getClusterState();

int numFound = 0;
for (String c : state.getCollections()) {
DocCollection collection = state.getCollection(c);
Map<String, DocCollection> collectionsMap = state.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
DocCollection collection = entry.getValue();
for (Slice slice : collection.getSlices()) {
if (slice.getReplicasMap().get("core_node1") != null) {
numFound++;
@@ -87,7 +87,9 @@ public class TestCloudBackupRestore extends SolrCloudTestCase {
}
}

create.process(cluster.getSolrClient());
CloudSolrClient solrClient = cluster.getSolrClient();
create.process(solrClient);

indexDocs(collectionName);

if (!isImplicit && random().nextBoolean()) {

@@ -95,14 +97,14 @@ public class TestCloudBackupRestore extends SolrCloudTestCase {
int prevActiveSliceCount = getActiveSliceCount(collectionName);
CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName);
splitShard.setShardName("shard1");
splitShard.process(cluster.getSolrClient());
splitShard.process(solrClient);
// wait until we see one more active slice...
for (int i = 0; getActiveSliceCount(collectionName) != prevActiveSliceCount + 1; i++) {
assertTrue(i < 30);
Thread.sleep(500);
}
// issue a hard commit. Split shard does a soft commit which isn't good enough for the backup/snapshooter to see
cluster.getSolrClient().commit();
solrClient.commit(collectionName);
}

testBackupAndRestore(collectionName);

@@ -119,8 +121,6 @@ public class TestCloudBackupRestore extends SolrCloudTestCase {
log.info("Indexing ZERO test docs");
return;
}
CloudSolrClient client = cluster.getSolrClient();
client.setDefaultCollection(collectionName);
List<SolrInputDocument> docs = new ArrayList<>(numDocs);
for (int i=0; i<numDocs; i++) {
SolrInputDocument doc = new SolrInputDocument();

@@ -128,8 +128,9 @@ public class TestCloudBackupRestore extends SolrCloudTestCase {
doc.addField("shard_s", "shard" + (1 + random.nextInt(NUM_SHARDS))); // for implicit router
docs.add(doc);
}
client.add(docs);// batch
client.commit();
CloudSolrClient client = cluster.getSolrClient();
client.add(collectionName, docs);// batch
client.commit(collectionName);
}

private void testBackupAndRestore(String collectionName) throws Exception {
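The test above now threads the collection name through every client call (client.add(collectionName, docs), client.commit(collectionName)) instead of calling setDefaultCollection on the shared cluster client, which would leak state across tests sharing that client. A small sketch of the per-call pattern, with an illustrative document:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.common.SolrInputDocument;

class PerCollectionIndexing {
  // Index into an explicitly named collection; no shared default-collection state.
  static void indexOne(CloudSolrClient client, String collection)
      throws SolrServerException, IOException {
    List<SolrInputDocument> docs = new ArrayList<>();
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "1"); // illustrative field value
    docs.add(doc);
    client.add(collection, docs); // batch add to the named collection
    client.commit(collection);
  }
}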
@@ -101,6 +101,7 @@ public class SolrCoreTest extends SolrTestCaseJ4 {
++ihCount; assertEquals(pathToClassMap.get("/schema"), "solr.SchemaHandler");
++ihCount; assertEquals(pathToClassMap.get("/sql"), "solr.SQLHandler");
++ihCount; assertEquals(pathToClassMap.get("/stream"), "solr.StreamHandler");
++ihCount; assertEquals(pathToClassMap.get("/graph"), "solr.GraphHandler");
++ihCount; assertEquals(pathToClassMap.get("/update"), "solr.UpdateRequestHandler");
++ihCount; assertEquals(pathToClassMap.get("/update/csv"), "solr.UpdateRequestHandler");
++ihCount; assertEquals(pathToClassMap.get("/update/json"), "solr.UpdateRequestHandler");
@@ -58,6 +58,7 @@ import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;

@@ -586,6 +587,83 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
assertEquals(nDocs+1, numFound(rQuery(nDocs+1, "*:*", slaveClient)));
}

/**
* We assert that if master is down for more than poll interval,
* the slave doesn't re-fetch the whole index from master again if
* the index hasn't changed. See SOLR-9036
*/
@Test
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-9036")
public void doTestIndexFetchOnMasterRestart() throws Exception {
useFactory(null);
try {
clearIndexWithReplication();
// change solrconfig having 'replicateAfter startup' option on master
master.copyConfigFile(CONF_DIR + "solrconfig-master2.xml",
"solrconfig.xml");

masterJetty.stop();
masterJetty.start();

nDocs--;
for (int i = 0; i < nDocs; i++)
index(masterClient, "id", i, "name", "name = " + i);

masterClient.commit();

NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient);
SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
assertEquals(nDocs, numFound(masterQueryRsp));

//get docs from slave and check if number is equal to master
NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient);
SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response");
assertEquals(nDocs, numFound(slaveQueryRsp));

//compare results
String cmp = BaseDistributedSearchTestCase.compare(masterQueryResult, slaveQueryResult, 0, null);
assertEquals(null, cmp);

assertEquals(1, Integer.parseInt(getSlaveDetails("timesIndexReplicated")));
String timesFailed = getSlaveDetails("timesFailed");
assertEquals(0, Integer.parseInt(timesFailed != null ? timesFailed : "0"));

masterJetty.stop();

// poll interval on slave is 1 second, so we just sleep for a few seconds
Thread.sleep(2000);

masterJetty.start();

// poll interval on slave is 1 second, so we just sleep for a few seconds
Thread.sleep(2000);

//get docs from slave and assert that they are still the same as before
slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient);
slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response");
assertEquals(nDocs, numFound(slaveQueryRsp));

int failed = Integer.parseInt(getSlaveDetails("timesFailed"));
assertTrue(failed > 0);
assertEquals(1, Integer.parseInt(getSlaveDetails("timesIndexReplicated")) - failed);
} finally {
resetFactory();
}
}

private String getSlaveDetails(String keyName) throws SolrServerException, IOException {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(CommonParams.QT, "/replication");
params.set("command", "details");
QueryResponse response = slaveClient.query(params);
System.out.println("SHALIN: " + response.getResponse());
// details/slave/timesIndexReplicated
NamedList<Object> details = (NamedList<Object>) response.getResponse().get("details");
NamedList<Object> slave = (NamedList<Object>) details.get("slave");
Object o = slave.get(keyName);
return o != null ? o.toString() : null;
}

@Test
public void doTestIndexFetchWithMasterUrl() throws Exception {
//change solrconfig on slave
@@ -2496,7 +2496,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
assertEquals(2, tuples.size());

List<String> collections = new ArrayList<>();
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollections());
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollectionsMap().keySet());
Collections.sort(collections);
for (Tuple tuple : tuples) {
assertEquals(zkServer.getZkAddress(), tuple.getString("TABLE_CAT"));

@@ -2510,7 +2510,7 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase {
assertEquals(2, tuples.size());

collections = new ArrayList<>();
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollections());
collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollectionsMap().keySet());
Collections.sort(collections);
for (Tuple tuple : tuples) {
assertEquals(zkServer.getZkAddress(), tuple.getString("TABLE_CAT"));
@@ -0,0 +1,155 @@
package org.apache.solr.response;

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import java.io.StringWriter;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.graph.Traversal;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.request.SolrQueryRequest;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestGraphMLResponseWriter extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
initCore("solrconfig.xml","schema12.xml");
}

@Test
public void testGraphMLOutput() throws Exception {
SolrQueryRequest request = req("blah", "blah"); // Just need a request to attach the stream and traversal to.
SolrQueryResponse response = new SolrQueryResponse();
Map context = request.getContext();
TupleStream stream = new TestStream(); //Simulates a GatherNodesStream
Traversal traversal = new Traversal();
context.put("traversal", traversal);
context.put("stream", stream);
StringWriter writer = new StringWriter();

GraphMLResponseWriter graphMLResponseWriter = new GraphMLResponseWriter();
graphMLResponseWriter.write(writer, request, response);
String graphML = writer.toString();

//Validate the nodes
String error = h.validateXPath(graphML,
"//graph/node[1][@id ='bill']",
"//graph/node[2][@id ='jim']",
"//graph/node[3][@id ='max']");
if(error != null) {
throw new Exception(error);
}
//Validate the edges
error = h.validateXPath(graphML,
"//graph/edge[1][@source ='jim']",
"//graph/edge[1][@target ='bill']",
"//graph/edge[2][@source ='max']",
"//graph/edge[2][@target ='bill']",
"//graph/edge[3][@source ='max']",
"//graph/edge[3][@target ='jim']",
"//graph/edge[4][@source ='jim']",
"//graph/edge[4][@target ='max']"
);

if(error != null) {
throw new Exception(error);
}

}

private class TestStream extends TupleStream {

private Iterator<Tuple> tuples;

public TestStream() {
//Create some nodes
List<Tuple> testTuples = new ArrayList();
Map m1 = new HashMap();

List<String> an1 = new ArrayList();
an1.add("jim");
an1.add("max");
m1.put("node", "bill");
m1.put("ancestors", an1);
testTuples.add(new Tuple(m1));

Map m2 = new HashMap();
List<String> an2 = new ArrayList();
an2.add("max");
m2.put("node", "jim");
m2.put("ancestors", an2);
testTuples.add(new Tuple(m2));

Map m3 = new HashMap();
List<String> an3 = new ArrayList();
an3.add("jim");
m3.put("node", "max");
m3.put("ancestors", an3);
testTuples.add(new Tuple(m3));

tuples = testTuples.iterator();
}

public StreamComparator getStreamSort() {
return null;
}

public void close() {

}

public void open() {

}

public List<TupleStream> children() {
return null;
}

public Tuple read() {
if(tuples.hasNext()) {
return tuples.next();
} else {
Map map = new HashMap();
map.put("EOF", true);
return new Tuple(map);
}
}

public void setStreamContext(StreamContext streamContext) {

}

public Explanation toExplanation(StreamFactory factory) {
return null;
}

}
}
@@ -618,14 +618,14 @@ public class CloudSolrClient extends SolrClient {
return null;
}

NamedList<Throwable> exceptions = new NamedList<>();
NamedList<NamedList> shardResponses = new NamedList<>();

Map<String, LBHttpSolrClient.Req> routes = updateRequest.getRoutes(router, col, urlMap, routableParams, this.idField);
if (routes == null) {
return null;
}

final NamedList<Throwable> exceptions = new NamedList<>();
final NamedList<NamedList> shardResponses = new NamedList<>(routes.size()+1); // +1 for deleteQuery

long start = System.nanoTime();

if (parallelUpdates) {
@@ -51,6 +51,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;

@@ -329,16 +330,17 @@ public class CloudSolrStream extends TupleStream implements Expressible {

Collection<Slice> slices = clusterState.getActiveSlices(this.collection);

if(slices == null) {
if (slices == null) {
//Try case insensitive match
for(String col : clusterState.getCollections()) {
if(col.equalsIgnoreCase(collection)) {
slices = clusterState.getActiveSlices(col);
Map<String, DocCollection> collectionsMap = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
if (entry.getKey().equalsIgnoreCase(collection)) {
slices = entry.getValue().getActiveSlices();
break;
}
}

if(slices == null) {
if (slices == null) {
throw new Exception("Collection not found:" + this.collection);
}
}
@@ -47,7 +47,8 @@ public class HashJoinStream extends TupleStream implements Expressible {

protected TupleStream hashStream;
protected TupleStream fullStream;
protected List<String> hashOn;
protected List<String> leftHashOn;
protected List<String> rightHashOn;
protected HashMap<Integer, List<Tuple>> hashedTuples;

protected Tuple workingFullTuple = null;
@@ -97,8 +98,25 @@ public class HashJoinStream extends TupleStream implements Expressible {
private void init(TupleStream fullStream, TupleStream hashStream, List<String> hashOn) throws IOException {
this.fullStream = fullStream;
this.hashStream = hashStream;
this.hashOn = hashOn;
this.hashedTuples = new HashMap<>();
this.leftHashOn = new ArrayList<>();
this.rightHashOn = new ArrayList<>();

for(String hasher : hashOn){
String[] parts = hasher.split("=");
if(1 == parts.length){
String field = parts[0].trim();
leftHashOn.add(field);
rightHashOn.add(field);
}
else if(2 == parts.length){
leftHashOn.add(parts[0].trim());
rightHashOn.add(parts[1].trim());
}
else{
throw new IOException(String.format(Locale.ROOT,"Invalid 'on' parameter - expecting 1 or more instances of 'field' or 'field=hashedField' but found '%s'",hasher));
}
}
}

@Override
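Per SOLR-9058 above, each entry of the 'on' parameter is now either a single field name (joined on both sides) or leftField=rightField, with the left side keying tuples from the full stream and the right side keying the hashed stream; in a streaming expression this reads like hashJoin(search(...), hashed=search(...), on="fieldA=fieldB"). A standalone sketch of just the parsing rule, with illustrative class and field names:

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;

class HashOnParserSketch {
  final List<String> leftHashOn = new ArrayList<>();
  final List<String> rightHashOn = new ArrayList<>();

  HashOnParserSketch(List<String> hashOn) throws IOException {
    for (String hasher : hashOn) {
      String[] parts = hasher.split("=");
      if (parts.length == 1) {          // "year" -> same field on both sides
        String field = parts[0].trim();
        leftHashOn.add(field);
        rightHashOn.add(field);
      } else if (parts.length == 2) {   // "personId=ownerId" -> left keys full stream, right keys hashed stream
        leftHashOn.add(parts[0].trim());
        rightHashOn.add(parts[1].trim());
      } else {
        throw new IOException(String.format(Locale.ROOT,
            "invalid 'on' entry '%s' - expecting 'field' or 'field=hashedField'", hasher));
      }
    }
  }

  public static void main(String[] args) throws IOException {
    HashOnParserSketch p = new HashOnParserSketch(Arrays.asList("personId=ownerId", "year"));
    System.out.println(p.leftHashOn);  // [personId, year]
    System.out.println(p.rightHashOn); // [ownerId, year]
  }
}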
@@ -127,12 +145,24 @@ public class HashJoinStream extends TupleStream implements Expressible {

// on
StringBuilder sb = new StringBuilder();
for(String part : hashOn){
for(int idx = 0; idx < leftHashOn.size(); ++idx){
if(sb.length() > 0){ sb.append(","); }
sb.append(part);

// we know that left and right hashOns are the same size
String left = leftHashOn.get(idx);
String right = rightHashOn.get(idx);

if(left.equals(right)){
sb.append(left);
}
else{
sb.append(left);
sb.append("=");
sb.append(right);
}
}
expression.addParameter(new StreamExpressionNamedParameter("on",sb.toString()));

expression.addParameter(new StreamExpressionNamedParameter("on",sb.toString()));
return expression;
}
@ -168,7 +198,7 @@ public class HashJoinStream extends TupleStream implements Expressible {
|
|||
|
||||
Tuple tuple = hashStream.read();
|
||||
while(!tuple.EOF){
|
||||
Integer hash = calculateHash(tuple);
|
||||
Integer hash = calculateHash(tuple, rightHashOn);
|
||||
if(null != hash){
|
||||
if(hashedTuples.containsKey(hash)){
|
||||
hashedTuples.get(hash).add(tuple);
|
||||
|
@ -183,7 +213,7 @@ public class HashJoinStream extends TupleStream implements Expressible {
|
|||
}
|
||||
}
|
||||
|
||||
protected Integer calculateHash(Tuple tuple){
|
||||
protected Integer calculateHash(Tuple tuple, List<String> hashOn){
|
||||
StringBuilder sb = new StringBuilder();
|
||||
for(String part : hashOn){
|
||||
Object obj = tuple.get(part);
|
||||
|
@ -191,7 +221,7 @@ public class HashJoinStream extends TupleStream implements Expressible {
|
|||
return null;
|
||||
}
|
||||
sb.append(obj.toString());
|
||||
sb.append("::"); // this is here to seperate fields
|
||||
sb.append("::"); // this is here to separate fields
|
||||
}
|
||||
|
||||
return sb.toString().hashCode();
|
||||
|
@ -215,7 +245,7 @@ public class HashJoinStream extends TupleStream implements Expressible {
|
|||
|
||||
// If fullTuple doesn't have a valid hash or if there is no doc to
|
||||
// join with then retry loop - keep going until we find one
|
||||
Integer fullHash = calculateHash(fullTuple);
|
||||
Integer fullHash = calculateHash(fullTuple, leftHashOn);
|
||||
if(null == fullHash || !hashedTuples.containsKey(fullHash)){
|
||||
continue findNextWorkingFullTuple;
|
||||
}
|
||||
|
|
|
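For reference, the key produced by calculateHash(Tuple, List<String>) is just the join-field values concatenated with a "::" separator, so two tuples collide only when every named field matches; the separator keeps values ("ab","c") from colliding with ("a","bc"). A rough standalone sketch of the same idea (a plain Map stands in for Tuple here):

    static Integer hashOf(Map<String, Object> tuple, List<String> hashOn) {
      StringBuilder sb = new StringBuilder();
      for (String part : hashOn) {
        Object obj = tuple.get(part);
        if (obj == null) {
          return null; // a missing join field disqualifies the tuple
        }
        sb.append(obj.toString()).append("::"); // separator prevents ambiguous concatenation
      }
      return sb.toString().hashCode();
    }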
@@ -65,9 +65,21 @@ public class OuterHashJoinStream extends HashJoinStream implements Expressible {

// on
StringBuilder sb = new StringBuilder();
for(String part : hashOn){
for(int idx = 0; idx < leftHashOn.size(); ++idx){
if(sb.length() > 0){ sb.append(","); }
sb.append(part);

// we know that left and right hashOns are the same size
String left = leftHashOn.get(idx);
String right = rightHashOn.get(idx);

if(left.equals(right)){
sb.append(left);
}
else{
sb.append(left);
sb.append("=");
sb.append(right);
}
}
expression.addParameter(new StreamExpressionNamedParameter("on",sb.toString()));

@@ -87,7 +99,7 @@ public class OuterHashJoinStream extends HashJoinStream implements Expressible {
// If fullTuple doesn't have a valid hash or the hash cannot be found in the hashedTuples then
// return the tuple from fullStream.
// This is an outer join so there is no requirement there be a matching value in the hashed stream
Integer fullHash = calculateHash(fullTuple);
Integer fullHash = calculateHash(fullTuple, leftHashOn);
if(null == fullHash || !hashedTuples.containsKey(fullHash)){
return fullTuple.clone();
}

@@ -215,7 +215,7 @@ public class SolrStream extends TupleStream {
throw new IOException("--> "+this.baseUrl+":"+e.getMessage());
} catch (Exception e) {
//The Stream source did not provide an exception in a format that the SolrStream could propagate.
throw new IOException("--> "+this.baseUrl+": An exception has occurred on the server, refer to server log for details.");
throw new IOException("--> "+this.baseUrl+": An exception has occurred on the server, refer to server log for details.", e);
}
}

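The rewritten throw above keeps the generic client-facing message but now chains the original exception as the cause, so the underlying failure is no longer discarded. A minimal illustration of the difference (this snippet is not from the patch, just standard Java cause-chaining):

    try {
      throw new RuntimeException("root cause from the stream source");
    } catch (Exception e) {
      IOException wrapped = new IOException("An exception has occurred on the server", e);
      // wrapped.getCause() == e, so printStackTrace() still shows the original failure
    }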
@@ -463,16 +463,17 @@ public class TopicStream extends CloudSolrStream implements Expressible {

Collection<Slice> slices = clusterState.getActiveSlices(this.collection);

if(slices == null) {
if (slices == null) {
//Try case insensitive match
for(String col : clusterState.getCollections()) {
if(col.equalsIgnoreCase(collection)) {
slices = clusterState.getActiveSlices(col);
Map<String, DocCollection> collectionsMap = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
if (entry.getKey().equalsIgnoreCase(collection)) {
slices = entry.getValue().getActiveSlices();
break;
}
}

if(slices == null) {
if (slices == null) {
throw new Exception("Collection not found:" + this.collection);
}
}

@@ -188,6 +188,7 @@ public class UpdateStream extends TupleStream implements Expressible {
child.setImplementingClass(getClass().getName());
child.setExpressionType(ExpressionType.STREAM_DECORATOR);
child.setExpression(toExpression(factory, false).toString());
child.addChild(tupleSource.toExplanation(factory));

explanation.addChild(child);

@@ -16,20 +16,14 @@
*/
package org.apache.solr.client.solrj.io.stream.metrics;

import java.io.Serializable;
import org.apache.solr.client.solrj.io.Tuple;

public class Bucket implements Serializable {
public class Bucket {

private static final long serialVersionUID = 1;
private static final String NULL_VALUE = "NULL";

private String bucketKey;

public Bucket() {

}

public Bucket(String bucketKey) {
this.bucketKey = bucketKey;
}

@@ -16,7 +16,6 @@
*/
package org.apache.solr.client.solrj.io.stream.metrics;
import java.io.IOException;
import java.io.Serializable;
import java.util.Locale;

import org.apache.solr.client.solrj.io.Tuple;

@@ -24,15 +23,13 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class CountMetric extends Metric implements Serializable {

private static final long serialVersionUID = 1;

public class CountMetric extends Metric {
private long count;

public CountMetric(){
init("count");
}

public CountMetric(StreamExpression expression, StreamFactory factory) throws IOException{
// grab all parameters out
String functionName = expression.getFunctionName();

@@ -63,7 +60,7 @@ public class CountMetric extends Metric implements Serializable {
++count;
}

public double getValue() {
public Long getValue() {
return count;
}

@@ -17,7 +17,6 @@
package org.apache.solr.client.solrj.io.stream.metrics;

import java.io.IOException;
import java.io.Serializable;
import java.util.Locale;

import org.apache.solr.client.solrj.io.Tuple;

@@ -25,11 +24,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class MaxMetric extends Metric implements Serializable {

private static final long serialVersionUID = 1;

public static final String MAX = "max";
public class MaxMetric extends Metric {
private long longMax = -Long.MIN_VALUE;
private double doubleMax = -Double.MAX_VALUE;
private String columnName;

@@ -37,6 +32,7 @@ public class MaxMetric extends Metric implements Serializable {
public MaxMetric(String columnName){
init("max", columnName);
}

public MaxMetric(StreamExpression expression, StreamFactory factory) throws IOException{
// grab all parameters out
String functionName = expression.getFunctionName();

@@ -58,8 +54,8 @@
setFunctionName(functionName);
setIdentifier(functionName, "(", columnName, ")");
}

public double getValue() {

public Number getValue() {
if(longMax == Long.MIN_VALUE) {
return doubleMax;
} else {

@@ -68,8 +64,7 @@
}

public String[] getColumns() {
String[] cols = {columnName};
return cols;
return new String[]{columnName};
}

public void update(Tuple tuple) {

@@ -17,7 +17,6 @@
package org.apache.solr.client.solrj.io.stream.metrics;

import java.io.IOException;
import java.io.Serializable;
import java.util.Locale;

import org.apache.solr.client.solrj.io.Tuple;

@@ -25,14 +24,12 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class MeanMetric extends Metric implements Serializable {
public class MeanMetric extends Metric {
// How'd the MeanMetric get to be so mean?
// Maybe it was born with it.
// Maybe it was mayba-mean.
//
// I'll see myself out.

private static final long serialVersionUID = 1;

private String columnName;
private double doubleSum;

@@ -42,6 +39,7 @@ public class MeanMetric extends Metric implements Serializable {
public MeanMetric(String columnName){
init("avg", columnName);
}

public MeanMetric(StreamExpression expression, StreamFactory factory) throws IOException{
// grab all parameters out
String functionName = expression.getFunctionName();

@@ -69,10 +67,10 @@
Object o = tuple.get(columnName);
if(o instanceof Double) {
Double d = (Double)tuple.get(columnName);
doubleSum += d.doubleValue();
doubleSum += d;
} else {
Long l = (Long)tuple.get(columnName);
longSum += l.doubleValue();
longSum += l;
}
}

@@ -81,19 +79,16 @@
}

public String[] getColumns() {
String[] cols = {columnName};
return cols;
return new String[]{columnName};
}

public double getValue() {
public Double getValue() {
double dcount = (double)count;
if(longSum == 0) {
double ave = doubleSum/dcount;
return ave;
return doubleSum/dcount;

} else {
double ave = longSum/dcount;
return ave;
return longSum/dcount;
}
}

@@ -17,7 +17,6 @@
package org.apache.solr.client.solrj.io.stream.metrics;

import java.io.IOException;
import java.io.Serializable;
import java.util.UUID;

import org.apache.solr.client.solrj.io.Tuple;

@@ -26,19 +25,16 @@ import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public abstract class Metric implements Serializable, Expressible {

private static final long serialVersionUID = 1L;
public abstract class Metric implements Expressible {

private UUID metricNodeId = UUID.randomUUID();
private String functionName;
private String identifier;

// @Override

public String getFunctionName(){
return functionName;
}

// @Override

public void setFunctionName(String functionName){
this.functionName = functionName;
}

@@ -59,7 +55,7 @@ public abstract class Metric implements Serializable, Expressible {

@Override
public Explanation toExplanation(StreamFactory factory) throws IOException {
return new Explanation(metricNodeId.toString())
return new Explanation(getMetricNodeId().toString())
.withFunctionName(functionName)
.withImplementingClass(getClass().getName())
.withExpression(toExpression(factory).toString())

@@ -70,7 +66,7 @@ public abstract class Metric implements Serializable, Expressible {
return metricNodeId;
}

public abstract double getValue();
public abstract Number getValue();
public abstract void update(Tuple tuple);
public abstract Metric newInstance();
public abstract String[] getColumns();

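Widening the abstract getValue() from double to Number lets each subclass return its natural boxed type through a covariant override: CountMetric returns Long, MeanMetric returns Double, and Min/Max/Sum return whichever of Long or Double fits the column. A hedged sketch of what callers gain (the column name is hypothetical):

    Metric count = new CountMetric();
    Metric mean = new MeanMetric("price_d");
    // ... update(tuple) calls happen elsewhere ...
    Number c = count.getValue(); // actually a Long: counts above 2^53 stay exact, no lossy double conversion
    Number m = mean.getValue();  // actually a Double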
@@ -25,8 +25,6 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class MinMetric extends Metric {

private static final long serialVersionUID = 1L;

private long longMin = Long.MAX_VALUE;
private double doubleMin = Double.MAX_VALUE;

@@ -35,6 +33,7 @@ public class MinMetric extends Metric {
public MinMetric(String columnName){
init("min", columnName);
}

public MinMetric(StreamExpression expression, StreamFactory factory) throws IOException{
// grab all parameters out
String functionName = expression.getFunctionName();

@@ -57,13 +56,11 @@ public class MinMetric extends Metric {
setIdentifier(functionName, "(", columnName, ")");
}

public String[] getColumns() {
String[] cols = {columnName};
return cols;
return new String[]{columnName};
}

public double getValue() {
public Number getValue() {
if(longMin == Long.MAX_VALUE) {
return doubleMin;
} else {

@@ -17,7 +17,6 @@
package org.apache.solr.client.solrj.io.stream.metrics;

import java.io.IOException;
import java.io.Serializable;
import java.util.Locale;

import org.apache.solr.client.solrj.io.Tuple;

@@ -25,10 +24,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;

public class SumMetric extends Metric implements Serializable {

private static final long serialVersionUID = 1;

public class SumMetric extends Metric {
private String columnName;
private double doubleSum;
private long longSum;

@@ -36,6 +32,7 @@ public class SumMetric extends Metric implements Serializable {
public SumMetric(String columnName){
init("sum", columnName);
}

public SumMetric(StreamExpression expression, StreamFactory factory) throws IOException{
// grab all parameters out
String functionName = expression.getFunctionName();

@@ -59,18 +56,17 @@
}

public String[] getColumns() {
String[] cols = {columnName};
return cols;
return new String[]{columnName};
}

public void update(Tuple tuple) {
Object o = tuple.get(columnName);
if(o instanceof Double) {
Double d = (Double)o;
doubleSum += d.doubleValue();
doubleSum += d;
} else {
Long l = (Long)o;
longSum += l.longValue();
longSum += l;
}
}

@@ -78,11 +74,11 @@
return new SumMetric(columnName);
}

public double getValue() {
public Number getValue() {
if(longSum == 0) {
return doubleSum;
} else {
return (double)longSum;
return longSum;
}
}

@@ -1042,6 +1042,10 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
return new RequestStatus(requestId);
}

public static void waitForAsyncRequest(String requestId, SolrClient client, long timeout) throws SolrServerException, InterruptedException, IOException {
requestStatus(requestId).waitFor(client, timeout);
}

// REQUESTSTATUS request
public static class RequestStatus extends CollectionAdminRequest<RequestStatusResponse> {

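The new waitForAsyncRequest helper collapses the submit-then-poll pattern into one call, which the rewritten tests below rely on. A usage sketch (the collection name, config set, and client variable are illustrative):

    String requestId = CollectionAdminRequest.createCollection("myColl", "conf", 2, 1)
        .processAsync(client);
    // Blocks until the async request completes or the timeout elapses.
    CollectionAdminRequest.waitForAsyncRequest(requestId, client, 30);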
@@ -30,13 +30,17 @@ import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.XML;

@@ -103,6 +107,10 @@ public class UpdateRequest extends AbstractUpdateRequest {
return this;
}

public UpdateRequest add(String... fields) {
return add(new SolrInputDocument(fields));
}

/**
* Add a SolrInputDocument to this request
* @param doc the document

@@ -209,6 +217,13 @@
deleteQuery.add(q);
return this;
}

public UpdateResponse commit(SolrClient client, String collection) throws IOException, SolrServerException {
if (params == null)
params = new ModifiableSolrParams();
params.set(UpdateParams.COMMIT, "true");
return process(client, collection);
}

/**
* @param router to route updates with

@@ -383,7 +398,7 @@
/**
* @since solr 1.4
*/
public void writeXML(Writer writer) throws IOException {
public UpdateRequest writeXML(Writer writer) throws IOException {
List<Map<SolrInputDocument,Map<String,Object>>> getDocLists = getDocLists(documents);

for (Map<SolrInputDocument,Map<String,Object>> docs : getDocLists) {

@@ -457,6 +472,7 @@
}
writer.append("</delete>");
}
return this;
}

// --------------------------------------------------------------------------

@@ -38,8 +38,12 @@ public class SolrInputDocument extends SolrDocumentBase<SolrInputField, SolrInpu
private float _documentBoost = 1.0f;
private List<SolrInputDocument> _childDocuments;

public SolrInputDocument() {
public SolrInputDocument(String... fields) {
_fields = new LinkedHashMap<>();
assert fields.length % 2 == 0;
for (int i = 0; i < fields.length; i += 2) {
addField(fields[i], fields[i + 1]);
}
}

public SolrInputDocument(Map<String,SolrInputField> fields) {

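Taken together, UpdateRequest.add(String...), commit(SolrClient, String), and the SolrInputDocument varargs constructor enable the compact fluent indexing style the rewritten tests use. A sketch (the client variable and collection name are placeholders):

    new UpdateRequest()
        .add("id", "1", "a_t", "hello")                               // field/value pairs; length must be even
        .add(new SolrInputDocument("id", "2", "a_t", "world"), false) // overwrite=false
        .commit(client, "myCollection");                              // sets commit=true, then processes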
@@ -218,7 +218,10 @@ public class ClusterState implements JSONWriter.Writable {
* {@link CollectionRef#get()} which can make a call to ZooKeeper. This is necessary
* because the semantics of how collection list is loaded have changed in SOLR-6629.
* Please see javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)}
*
* @deprecated use {@link #getCollectionsMap()} to avoid a second lookup for lazy collections
*/
@Deprecated
public Set<String> getCollections() {
Set<String> result = new HashSet<>();
for (Entry<String, CollectionRef> entry : collectionStates.entrySet()) {

@@ -229,6 +232,27 @@
return result;
}

/**
* Get a map of collection name vs DocCollection objects
*
* Implementation note: This method resolves the collection reference by calling
* {@link CollectionRef#get()} which can make a call to ZooKeeper. This is necessary
* because the semantics of how collection list is loaded have changed in SOLR-6629.
* Please see javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)}
*
* @return a map of collection name vs DocCollection object
*/
public Map<String, DocCollection> getCollectionsMap() {
Map<String, DocCollection> result = new HashMap<>(collectionStates.size());
for (Entry<String, CollectionRef> entry : collectionStates.entrySet()) {
DocCollection collection = entry.getValue().get();
if (collection != null) {
result.put(entry.getKey(), collection);
}
}
return result;
}

/**
* Get names of the currently live nodes.
*/

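Because each name returned by the deprecated getCollections() typically costs a second CollectionRef.get() when the caller then fetches the DocCollection, switching to getCollectionsMap() halves the potential ZooKeeper round trips. The mechanical rewrite applied throughout this commit:

    // Before: one resolution for the names, another per collection.
    for (String name : clusterState.getCollections()) {
      DocCollection coll = clusterState.getCollection(name);
      // ...
    }

    // After: a single resolution pass.
    for (Map.Entry<String, DocCollection> entry : clusterState.getCollectionsMap().entrySet()) {
      DocCollection coll = entry.getValue();
      // ...
    }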
@@ -21,6 +21,7 @@ import java.lang.invoke.MethodHandles;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

@@ -66,14 +67,14 @@ public class ClusterStateUtil {
success = true;
ClusterState clusterState = zkStateReader.getClusterState();
if (clusterState != null) {
Set<String> collections;
Map<String, DocCollection> collections = null;
if (collection != null) {
collections = Collections.singleton(collection);
collections = Collections.singletonMap(collection, clusterState.getCollection(collection));
} else {
collections = clusterState.getCollections();
collections = clusterState.getCollectionsMap();
}
for (String coll : collections) {
DocCollection docCollection = clusterState.getCollection(coll);
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection docCollection = entry.getValue();
Collection<Slice> slices = docCollection.getSlices();
for (Slice slice : slices) {
// only look at active shards

@@ -178,14 +179,14 @@ public class ClusterStateUtil {
success = true;
ClusterState clusterState = zkStateReader.getClusterState();
if (clusterState != null) {
Set<String> collections;
if (collection == null) {
collections = clusterState.getCollections();
Map<String, DocCollection> collections = null;
if (collection != null) {
collections = Collections.singletonMap(collection, clusterState.getCollection(collection));
} else {
collections = Collections.singleton(collection);
collections = clusterState.getCollectionsMap();
}
for (String coll : collections) {
DocCollection docCollection = clusterState.getCollection(coll);
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
DocCollection docCollection = entry.getValue();
Collection<Slice> slices = docCollection.getSlices();
for (Slice slice : slices) {
// only look at active shards

@@ -41,7 +41,8 @@ public class DocCollection extends ZkNodeProps {
public static final String STATE_FORMAT = "stateFormat";
public static final String RULE = "rule";
public static final String SNITCH = "snitch";
private int znodeVersion = -1; // sentinel

private final int znodeVersion;

private final String name;
private final Map<String, Slice> slices;

@@ -55,7 +56,7 @@ public class DocCollection extends ZkNodeProps {

public DocCollection(String name, Map<String, Slice> slices, Map<String, Object> props, DocRouter router) {
this(name, slices, props, router, -1, ZkStateReader.CLUSTER_STATE);
this(name, slices, props, router, Integer.MAX_VALUE, ZkStateReader.CLUSTER_STATE);
}

/**

@@ -64,8 +65,9 @@ public class DocCollection extends ZkNodeProps {
* @param props The properties of the slice. This is used directly and a copy is not made.
*/
public DocCollection(String name, Map<String, Slice> slices, Map<String, Object> props, DocRouter router, int zkVersion, String znode) {
super(props==null ? props = new HashMap<String,Object>() : props);
this.znodeVersion = zkVersion;
super(props==null ? props = new HashMap<>() : props);
// -1 means any version in ZK CAS, so we choose Integer.MAX_VALUE instead to avoid accidental overwrites
this.znodeVersion = zkVersion == -1 ? Integer.MAX_VALUE : zkVersion;
this.name = name;

this.slices = slices;

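Why the sentinel changed: as the new comment says, ZooKeeper's compare-and-set treats version -1 as "match any version", so a DocCollection built without a real znode version could silently clobber newer state. Integer.MAX_VALUE can never equal an actual znode version, so such a write fails loudly instead. Roughly (the ZooKeeper calls are shown only as comments, not invoked):

    // setData(path, data, -1)                 -> unconditional overwrite
    // setData(path, data, Integer.MAX_VALUE)  -> BadVersionException, never a silent clobber
    int safeVersion = (zkVersion == -1) ? Integer.MAX_VALUE : zkVersion;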
@@ -520,7 +520,8 @@ public class ZkStateReader implements Closeable {
* In fact this is a clever way to avoid doing a ZK exists check on
* the /collections/collection_name/state.json znode
* Such an exists check is done in {@link ClusterState#hasCollection(String)} and
* {@link ClusterState#getCollections()} method as a safeguard against exposing wrong collection names to the users
* {@link ClusterState#getCollections()} and {@link ClusterState#getCollectionsMap()} methods
* have a safeguard against exposing wrong collection names to the users
*/
private void refreshCollectionList(Watcher watcher) throws KeeperException, InterruptedException {
synchronized (refreshCollectionListLock) {

@@ -715,8 +716,7 @@
Map<String,Slice> slices = clusterState.getSlicesMap(collection);
if (slices == null) {
throw new ZooKeeperException(ErrorCode.BAD_REQUEST,
"Could not find collection in zk: " + collection + " "
+ clusterState.getCollections());
"Could not find collection in zk: " + collection);
}

Slice replicas = slices.get(shardId);

@@ -16,6 +16,19 @@
*/
package org.apache.solr.client.solrj.impl;

import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeoutException;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

@@ -26,12 +39,13 @@ import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

@@ -46,6 +60,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;

@@ -53,163 +68,87 @@ import org.junit.rules.ExpectedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeoutException;

import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
import static org.apache.solr.common.util.Utils.makeMap;
import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;

/**
* This test would be faster if we simulated the zk state instead.
*/
@Slow
public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public class CloudSolrClientTest extends SolrCloudTestCase {

private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
private static final String COLLECTION = "collection1";

private static final String id = "id";

private static final int TIMEOUT = 30;

@BeforeClass
public static void beforeSuperClass() {
// this is necessary because AbstractZkTestCase.buildZooKeeper is used by AbstractDistribZkTestBase
// and the auto-detected SOLRHOME=TEST_HOME() does not exist for solrj tests
// todo fix this
AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
public static void setupCluster() throws Exception {
configureCluster(3)
.addConfig("conf", getFile("solrj").toPath().resolve("solr").resolve("configsets").resolve("streaming").resolve("conf"))
.configure();

CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
false, true, TIMEOUT);
}

protected String getCloudSolrConfig() {
return "solrconfig.xml";
}

@Override
public String getSolrHome() {
return SOLR_HOME;
}

public static String SOLR_HOME() {
return SOLR_HOME;
}

public CloudSolrClientTest() {
super();
sliceCount = 2;
fixShardCount(3);
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

@Before
public void cleanIndex() throws Exception {
new UpdateRequest()
.deleteByQuery("*:*")
.commit(cluster.getSolrClient(), COLLECTION);
}

@Test
public void test() throws Exception {
testParallelUpdateQTime();
checkCollectionParameters();
allTests();
stateVersionParamTest();
customHttpClientTest();
testOverwriteOption();
preferLocalShardsTest();
}

private void testParallelUpdateQTime() throws Exception {
public void testParallelUpdateQTime() throws Exception {
UpdateRequest req = new UpdateRequest();
for (int i=0; i<10; i++)  {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", String.valueOf(TestUtil.nextInt(random(), 1000, 1100)));
req.add(doc);
}
UpdateResponse response = req.process(cloudClient);
UpdateResponse response = req.process(cluster.getSolrClient(), COLLECTION);
// See SOLR-6547, we just need to ensure that no exception is thrown here
assertTrue(response.getQTime() >= 0);
}

private void testOverwriteOption() throws Exception, SolrServerException,
IOException {
String collectionName = "overwriteCollection";
createCollection(collectionName, controlClientCloud, 1, 1);
waitForRecoveriesToFinish(collectionName, false);
try (CloudSolrClient cloudClient = createCloudClient(collectionName)) {
SolrInputDocument doc1 = new SolrInputDocument();
doc1.addField(id, "0");
doc1.addField("a_t", "hello1");
SolrInputDocument doc2 = new SolrInputDocument();
doc2.addField(id, "0");
doc2.addField("a_t", "hello2");

UpdateRequest request = new UpdateRequest();
request.add(doc1);
request.add(doc2);
request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
NamedList<Object> response = cloudClient.request(request);
QueryResponse resp = cloudClient.query(new SolrQuery("*:*"));

assertEquals("There should be one document because overwrite=true", 1, resp.getResults().getNumFound());

doc1 = new SolrInputDocument();
doc1.addField(id, "1");
doc1.addField("a_t", "hello1");
doc2 = new SolrInputDocument();
doc2.addField(id, "1");
doc2.addField("a_t", "hello2");

request = new UpdateRequest();
// overwrite=false
request.add(doc1, false);
request.add(doc2, false);
request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
response = cloudClient.request(request);

resp = cloudClient.query(new SolrQuery("*:*"));
@Test
public void testOverwriteOption() throws Exception {

CollectionAdminRequest.createCollection("overwrite", "conf", 1, 1)
.processAndWait(cluster.getSolrClient(), TIMEOUT);
AbstractDistribZkTestBase.waitForRecoveriesToFinish("overwrite", cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);

new UpdateRequest()
.add("id", "0", "a_t", "hello1")
.add("id", "0", "a_t", "hello2")
.commit(cluster.getSolrClient(), "overwrite");

QueryResponse resp = cluster.getSolrClient().query("overwrite", new SolrQuery("*:*"));
assertEquals("There should be one document because overwrite=true", 1, resp.getResults().getNumFound());

new UpdateRequest()
.add(new SolrInputDocument(id, "1", "a_t", "hello1"), /* overwrite = */ false)
.add(new SolrInputDocument(id, "1", "a_t", "hello2"), false)
.commit(cluster.getSolrClient(), "overwrite");

resp = cluster.getSolrClient().query("overwrite", new SolrQuery("*:*"));
assertEquals("There should be 3 documents because there should be two id=1 docs due to overwrite=false", 3, resp.getResults().getNumFound());

assertEquals("There should be 3 documents because there should be two id=1 docs due to overwrite=false", 3, resp.getResults().getNumFound());
}
}

private void allTests() throws Exception {

String collectionName = "clientTestExternColl";
createCollection(collectionName, controlClientCloud, 2, 2);
waitForRecoveriesToFinish(collectionName, false);
CloudSolrClient cloudClient = createCloudClient(collectionName);

assertNotNull(cloudClient);
@Test
public void testRouting() throws Exception {

handle.clear();
handle.put("timestamp", SKIPVAL);

waitForThingsToLevelOut(30);

controlClient.deleteByQuery("*:*");
cloudClient.deleteByQuery("*:*");

controlClient.commit();
this.cloudClient.commit();

SolrInputDocument doc1 = new SolrInputDocument();
doc1.addField(id, "0");
doc1.addField("a_t", "hello1");
SolrInputDocument doc2 = new SolrInputDocument();
doc2.addField(id, "2");
doc2.addField("a_t", "hello2");

UpdateRequest request = new UpdateRequest();
request.add(doc1);
request.add(doc2);
request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
AbstractUpdateRequest request = new UpdateRequest()
.add(id, "0", "a_t", "hello1")
.add(id, "2", "a_t", "hello2")
.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

// Test single threaded routed updates for UpdateRequest
NamedList<Object> response = cloudClient.request(request);
NamedList<Object> response = cluster.getSolrClient().request(request, COLLECTION);
CloudSolrClient.RouteResponse rr = (CloudSolrClient.RouteResponse) response;
Map<String,LBHttpSolrClient.Req> routes = rr.getRoutes();
Iterator<Map.Entry<String,LBHttpSolrClient.Req>> it = routes.entrySet()

@@ -234,22 +173,19 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {

// Test the deleteById routing for UpdateRequest

UpdateRequest delRequest = new UpdateRequest();
delRequest.deleteById("0");
delRequest.deleteById("2");
delRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
cloudClient.request(delRequest);
ModifiableSolrParams qParams = new ModifiableSolrParams();
qParams.add("q", "*:*");
QueryRequest qRequest = new QueryRequest(qParams);
QueryResponse qResponse = qRequest.process(cloudClient);
new UpdateRequest()
.deleteById("0")
.deleteById("2")
.commit(cluster.getSolrClient(), COLLECTION);

QueryResponse qResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("*:*"));
SolrDocumentList docs = qResponse.getResults();
assertTrue(docs.getNumFound() == 0);
assertEquals(0, docs.getNumFound());

// Test Multi-Threaded routed updates for UpdateRequest
try (CloudSolrClient threadedClient = getCloudSolrClient(zkServer.getZkAddress())) {
try (CloudSolrClient threadedClient = getCloudSolrClient(cluster.getZkServer().getZkAddress())) {
threadedClient.setParallelUpdates(true);
threadedClient.setDefaultCollection(collectionName);
threadedClient.setDefaultCollection(COLLECTION);
response = threadedClient.request(request);
rr = (CloudSolrClient.RouteResponse) response;
routes = rr.getRoutes();

@@ -277,13 +213,13 @@
// Test that queries with _route_ params are routed by the client

// Track request counts on each node before query calls
ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
DocCollection col = clusterState.getCollection(collectionName);
ClusterState clusterState = cluster.getSolrClient().getZkStateReader().getClusterState();
DocCollection col = clusterState.getCollection(COLLECTION);
Map<String, Long> requestCountsMap = Maps.newHashMap();
for (Slice slice : col.getSlices()) {
for (Replica replica : slice.getReplicas()) {
String baseURL = (String) replica.get(ZkStateReader.BASE_URL_PROP);
requestCountsMap.put(baseURL, getNumRequests(baseURL,collectionName));
requestCountsMap.put(baseURL, getNumRequests(baseURL, COLLECTION));
}
}

@@ -328,7 +264,7 @@
ModifiableSolrParams solrParams = new ModifiableSolrParams();
solrParams.set(CommonParams.Q, "*:*");
solrParams.set(ShardParams._ROUTE_, sameShardRoutes.get(random().nextInt(sameShardRoutes.size())));
log.info("output : {}" ,cloudClient.query(solrParams));
log.info("output: {}", cluster.getSolrClient().query(COLLECTION, solrParams));
}

// Request counts increase from expected nodes should aggregate to 1000, while there should be

@@ -341,7 +277,7 @@
String baseURL = (String) replica.get(ZkStateReader.BASE_URL_PROP);

Long prevNumRequests = requestCountsMap.get(baseURL);
Long curNumRequests = getNumRequests(baseURL, collectionName);
Long curNumRequests = getNumRequests(baseURL, COLLECTION);

long delta = curNumRequests - prevNumRequests;
if (expectedBaseURLs.contains(baseURL)) {

@@ -357,74 +293,36 @@
assertEquals("Unexpected number of requests to unexpected URLs: " + numRequestsToUnexpectedUrls,
0, increaseFromUnexpectedUrls);

controlClient.deleteByQuery("*:*");
cloudClient.deleteByQuery("*:*");

controlClient.commit();
cloudClient.commit();
cloudClient.close();
}

/**
* Tests if the specification of 'preferLocalShards' in the query-params
* limits the distributed query to locally hosted shards only
*/
private void preferLocalShardsTest() throws Exception {
@Test
public void preferLocalShardsTest() throws Exception {

String collectionName = "localShardsTestColl";

int liveNodes = getCommonCloudSolrClient()
.getZkStateReader().getClusterState().getLiveNodes().size();
int liveNodes = cluster.getJettySolrRunners().size();

// For preferLocalShards to succeed in a test, every shard should have
// all its cores on the same node.
// Hence the below configuration for our collection
Map<String, Object> props = makeMap(
REPLICATION_FACTOR, liveNodes,
MAX_SHARDS_PER_NODE, liveNodes,
NUM_SLICES, liveNodes);
Map<String,List<Integer>> collectionInfos = new HashMap<String,List<Integer>>();
createCollection(collectionInfos, collectionName, props, controlClientCloud);
waitForRecoveriesToFinish(collectionName, false);

CloudSolrClient cloudClient = createCloudClient(collectionName);
assertNotNull(cloudClient);
handle.clear();
handle.put("timestamp", SKIPVAL);
waitForThingsToLevelOut(30);

// Remove any documents from previous test (if any)
controlClient.deleteByQuery("*:*");
cloudClient.deleteByQuery("*:*");
controlClient.commit();
cloudClient.commit();
CollectionAdminRequest.createCollection(collectionName, "conf", liveNodes, liveNodes)
.setMaxShardsPerNode(liveNodes)
.processAndWait(cluster.getSolrClient(), TIMEOUT);
AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, cluster.getSolrClient().getZkStateReader(), false, true, TIMEOUT);

// Add some new documents
SolrInputDocument doc1 = new SolrInputDocument();
doc1.addField(id, "0");
doc1.addField("a_t", "hello1");
SolrInputDocument doc2 = new SolrInputDocument();
doc2.addField(id, "2");
doc2.addField("a_t", "hello2");
SolrInputDocument doc3 = new SolrInputDocument();
doc3.addField(id, "3");
doc3.addField("a_t", "hello2");

UpdateRequest request = new UpdateRequest();
request.add(doc1);
request.add(doc2);
request.add(doc3);
request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
new UpdateRequest()
.add(id, "0", "a_t", "hello1")
.add(id, "2", "a_t", "hello2")
.add(id, "3", "a_t", "hello2")
.commit(cluster.getSolrClient(), collectionName);

// Run the actual test for 'preferLocalShards'
queryWithPreferLocalShards(cloudClient, true, collectionName);

// Cleanup
controlClient.deleteByQuery("*:*");
cloudClient.deleteByQuery("*:*");
controlClient.commit();
cloudClient.commit();
cloudClient.close();
queryWithPreferLocalShards(cluster.getSolrClient(), true, collectionName);
}

private void queryWithPreferLocalShards(CloudSolrClient cloudClient,

@@ -432,8 +330,7 @@
String collectionName)
throws Exception
{
SolrQuery qRequest = new SolrQuery();
qRequest.setQuery("*:*");
SolrQuery qRequest = new SolrQuery("*:*");

ModifiableSolrParams qParams = new ModifiableSolrParams();
qParams.add("preferLocalShards", Boolean.toString(preferLocalShards));

@@ -444,7 +341,7 @@
// And since all the nodes are hosting cores from all shards, the
// distributed query formed by this node will select cores from the
// local shards only
QueryResponse qResponse = cloudClient.query (qRequest);
QueryResponse qResponse = cloudClient.query(collectionName, qRequest);

Object shardsInfo = qResponse.getResponse().get(ShardParams.SHARDS_INFO);
assertNotNull("Unable to obtain "+ShardParams.SHARDS_INFO, shardsInfo);

@@ -495,21 +392,23 @@
return (Long) resp.findRecursive("solr-mbeans", "QUERYHANDLER",
"standard", "stats", "requests");
}

@Override
protected void indexr(Object... fields) throws Exception {
SolrInputDocument doc = getDoc(fields);
indexDoc(doc);
}

private void checkCollectionParameters() throws Exception {
@Test
public void checkCollectionParameters() throws Exception {

try (CloudSolrClient client = createCloudClient("multicollection1")) {
try (CloudSolrClient client = getCloudSolrClient(cluster.getZkServer().getZkAddress())) {

createCollection("multicollection1", client, 2, 2);
createCollection("multicollection2", client, 2, 2);
waitForRecoveriesToFinish("multicollection1", false);
waitForRecoveriesToFinish("multicollection2", false);
String async1 = CollectionAdminRequest.createCollection("multicollection1", "conf", 2, 1)
.processAsync(client);
String async2 = CollectionAdminRequest.createCollection("multicollection2", "conf", 2, 1)
.processAsync(client);

CollectionAdminRequest.waitForAsyncRequest(async1, client, TIMEOUT);
CollectionAdminRequest.waitForAsyncRequest(async2, client, TIMEOUT);
AbstractDistribZkTestBase.waitForRecoveriesToFinish("multicollection1", client.getZkStateReader(), false, true, TIMEOUT);
AbstractDistribZkTestBase.waitForRecoveriesToFinish("multicollection2", client.getZkStateReader(), false, true, TIMEOUT);

client.setDefaultCollection("multicollection1");

List<SolrInputDocument> docs = new ArrayList<>(3);
for (int i = 0; i < 3; i++) {

@@ -540,73 +439,70 @@

}

private void stateVersionParamTest() throws Exception {
@Test
public void stateVersionParamTest() throws Exception {

try (CloudSolrClient client = createCloudClient(null)) {
String collectionName = "checkStateVerCol";
createCollection(collectionName, client, 1, 3);
waitForRecoveriesToFinish(collectionName, false);
DocCollection coll = client.getZkStateReader().getClusterState().getCollection(collectionName);
Replica r = coll.getSlices().iterator().next().getReplicas().iterator().next();
DocCollection coll = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION);
Replica r = coll.getSlices().iterator().next().getReplicas().iterator().next();

SolrQuery q = new SolrQuery().setQuery("*:*");
HttpSolrClient.RemoteSolrException sse = null;
SolrQuery q = new SolrQuery().setQuery("*:*");
HttpSolrClient.RemoteSolrException sse = null;

final String url = r.getStr(ZkStateReader.BASE_URL_PROP) + "/" +collectionName;
try (HttpSolrClient solrClient = getHttpSolrClient(url)) {
final String url = r.getStr(ZkStateReader.BASE_URL_PROP) + "/" + COLLECTION;
try (HttpSolrClient solrClient = getHttpSolrClient(url)) {

log.info("should work query, result {}", solrClient.query(q));
//no problem
q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + coll.getZNodeVersion());
log.info("2nd query , result {}", solrClient.query(q));
//no error yet good
log.info("should work query, result {}", solrClient.query(q));
//no problem
q.setParam(CloudSolrClient.STATE_VERSION, COLLECTION + ":" + coll.getZNodeVersion());
log.info("2nd query , result {}", solrClient.query(q));
//no error yet good

q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + (coll.getZNodeVersion() - 1)); //an older version expect error
q.setParam(CloudSolrClient.STATE_VERSION, COLLECTION + ":" + (coll.getZNodeVersion() - 1)); //an older version expect error

QueryResponse rsp = solrClient.query(q);
Map m = (Map) rsp.getResponse().get(CloudSolrClient.STATE_VERSION, rsp.getResponse().size()-1);
assertNotNull("Expected an extra information from server with the list of invalid collection states", m);
assertNotNull(m.get(COLLECTION));
}

//now send the request to another node that does not serve the collection

Set<String> allNodesOfColl = new HashSet<>();
for (Slice slice : coll.getSlices()) {
for (Replica replica : slice.getReplicas()) {
allNodesOfColl.add(replica.getStr(ZkStateReader.BASE_URL_PROP));
}
}
String theNode = null;
Set<String> liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes();
for (String s : liveNodes) {
String n = cluster.getSolrClient().getZkStateReader().getBaseUrlForNodeName(s);
if(!allNodesOfColl.contains(n)){
theNode = n;
break;
}
}
log.info("the node which does not serve this collection{} ",theNode);
assertNotNull(theNode);

final String solrClientUrl = theNode + "/" + COLLECTION;
try (SolrClient solrClient = getHttpSolrClient(solrClientUrl)) {

q.setParam(CloudSolrClient.STATE_VERSION, COLLECTION + ":" + (coll.getZNodeVersion()-1));
try {
QueryResponse rsp = solrClient.query(q);
Map m = (Map) rsp.getResponse().get(CloudSolrClient.STATE_VERSION, rsp.getResponse().size()-1);
assertNotNull("Expected an extra information from server with the list of invalid collection states", m);
assertNotNull(m.get(collectionName));
}

//now send the request to another node that does not serve the collection

Set<String> allNodesOfColl = new HashSet<>();
for (Slice slice : coll.getSlices()) {
for (Replica replica : slice.getReplicas()) {
allNodesOfColl.add(replica.getStr(ZkStateReader.BASE_URL_PROP));
}
}
String theNode = null;
Set<String> liveNodes = client.getZkStateReader().getClusterState().getLiveNodes();
for (String s : liveNodes) {
String n = client.getZkStateReader().getBaseUrlForNodeName(s);
if(!allNodesOfColl.contains(n)){
theNode = n;
break;
}
}
log.info("the node which does not serve this collection{} ",theNode);
assertNotNull(theNode);

final String solrClientUrl = theNode + "/" + collectionName;
try (SolrClient solrClient = getHttpSolrClient(solrClientUrl)) {

q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + (coll.getZNodeVersion()-1));
try {
QueryResponse rsp = solrClient.query(q);
log.info("error was expected");
} catch (HttpSolrClient.RemoteSolrException e) {
sse = e;
}
assertNotNull(sse);
assertEquals(" Error code should be 510", SolrException.ErrorCode.INVALID_STATE.code, sse.code());
log.info("error was expected");
} catch (HttpSolrClient.RemoteSolrException e) {
sse = e;
}
assertNotNull(sse);
assertEquals(" Error code should be 510", SolrException.ErrorCode.INVALID_STATE.code, sse.code());
}

}

@Test
public void testShutdown() throws IOException {
try (CloudSolrClient client = getCloudSolrClient("[ff01::114]:33332")) {
client.setZkConnectTimeout(100);

@@ -620,22 +516,23 @@
@Rule
public ExpectedException exception = ExpectedException.none();

@Test
public void testWrongZkChrootTest() throws IOException {

exception.expect(SolrException.class);
exception.expectMessage("cluster not found/not ready");

try (CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress() + "/xyz/foo")) {
client.setDefaultCollection(DEFAULT_COLLECTION);
try (CloudSolrClient client = getCloudSolrClient(cluster.getZkServer().getZkAddress() + "/xyz/foo")) {
client.setZkClientTimeout(1000 * 60);
client.connect();
fail("Expected exception");
}
}

@Test
public void customHttpClientTest() throws IOException {
CloseableHttpClient client = HttpClientUtil.createClient(null);
try (CloudSolrClient solrClient = getCloudSolrClient(zkServer.getZkAddress(), client)) {
try (CloudSolrClient solrClient = getCloudSolrClient(cluster.getZkServer().getZkAddress(), client)) {

assertTrue(solrClient.getLbClient().getHttpClient() == client);

@@ -17,35 +17,44 @@ package org.apache.solr.client.solrj.io.graph;
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.impl.InputStreamResponseParser;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
import org.apache.solr.client.solrj.io.comp.FieldComparator;
import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.HashJoinStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.AfterClass;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

@@ -58,96 +67,52 @@ import org.junit.Test;

@Slow
@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
public class GraphExpressionTest extends SolrCloudTestCase {

  private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
  private static final String COLLECTION = "collection1";

  static {
    schemaString = "schema-streaming.xml";
  }
  private static final String id = "id";

  private static final int TIMEOUT = 30;

  @BeforeClass
  public static void beforeSuperClass() {
    AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
  }
  public static void setupCluster() throws Exception {
    configureCluster(2)
        .addConfig("conf", getFile("solrj").toPath().resolve("solr").resolve("configsets").resolve("streaming").resolve("conf"))
        .configure();

  @AfterClass
  public static void afterSuperClass() {

  }

  protected String getCloudSolrConfig() {
    return "solrconfig-streaming.xml";
  }


  @Override
  public String getSolrHome() {
    return SOLR_HOME;
  }

  public static String SOLR_HOME() {
    return SOLR_HOME;
    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
        false, true, TIMEOUT);
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // we expect this type of exception as shards go up and down...
    //ignoreException(".*");

    System.setProperty("numShards", Integer.toString(sliceCount));
  }

  @Override
  @After
  public void tearDown() throws Exception {
    super.tearDown();
    resetExceptionIgnores();
  }

  public GraphExpressionTest() {
    super();
    sliceCount = 2;
  public void cleanIndex() throws Exception {
    new UpdateRequest()
        .deleteByQuery("*:*")
        .commit(cluster.getSolrClient(), COLLECTION);
  }

  @Test
  public void testAll() throws Exception{
    assertNotNull(cloudClient);
  public void testShortestPathStream() throws Exception {

    handle.clear();
    handle.put("timestamp", SKIPVAL);

    waitForRecoveriesToFinish(false);

    del("*:*");
    commit();

    testShortestPathStream();
    testGatherNodesStream();
    testGatherNodesFriendsStream();
  }

  private void testShortestPathStream() throws Exception {

    indexr(id, "0", "from_s", "jim", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "1", "from_s", "jim", "to_s", "dave", "predicate_s", "knows");
    indexr(id, "2", "from_s", "jim", "to_s", "stan", "predicate_s", "knows");
    indexr(id, "3", "from_s", "dave", "to_s", "stan", "predicate_s", "knows");
    indexr(id, "4", "from_s", "dave", "to_s", "bill", "predicate_s", "knows");
    indexr(id, "5", "from_s", "dave", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "20", "from_s", "dave", "to_s", "alex", "predicate_s", "knows");
    indexr(id, "21", "from_s", "alex", "to_s", "steve", "predicate_s", "knows");
    indexr(id, "6", "from_s", "stan", "to_s", "alice", "predicate_s", "knows");
    indexr(id, "7", "from_s", "stan", "to_s", "mary", "predicate_s", "knows");
    indexr(id, "8", "from_s", "stan", "to_s", "dave", "predicate_s", "knows");
    indexr(id, "10", "from_s", "mary", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "11", "from_s", "mary", "to_s", "max", "predicate_s", "knows");
    indexr(id, "12", "from_s", "mary", "to_s", "jim", "predicate_s", "knows");
    indexr(id, "13", "from_s", "mary", "to_s", "steve", "predicate_s", "knows");

    commit();
    new UpdateRequest()
        .add(id, "0", "from_s", "jim", "to_s", "mike", "predicate_s", "knows")
        .add(id, "1", "from_s", "jim", "to_s", "dave", "predicate_s", "knows")
        .add(id, "2", "from_s", "jim", "to_s", "stan", "predicate_s", "knows")
        .add(id, "3", "from_s", "dave", "to_s", "stan", "predicate_s", "knows")
        .add(id, "4", "from_s", "dave", "to_s", "bill", "predicate_s", "knows")
        .add(id, "5", "from_s", "dave", "to_s", "mike", "predicate_s", "knows")
        .add(id, "20", "from_s", "dave", "to_s", "alex", "predicate_s", "knows")
        .add(id, "21", "from_s", "alex", "to_s", "steve", "predicate_s", "knows")
        .add(id, "6", "from_s", "stan", "to_s", "alice", "predicate_s", "knows")
        .add(id, "7", "from_s", "stan", "to_s", "mary", "predicate_s", "knows")
        .add(id, "8", "from_s", "stan", "to_s", "dave", "predicate_s", "knows")
        .add(id, "10", "from_s", "mary", "to_s", "mike", "predicate_s", "knows")
        .add(id, "11", "from_s", "mary", "to_s", "max", "predicate_s", "knows")
        .add(id, "12", "from_s", "mary", "to_s", "jim", "predicate_s", "knows")
        .add(id, "13", "from_s", "mary", "to_s", "steve", "predicate_s", "knows")
        .commit(cluster.getSolrClient(), COLLECTION);

    List<Tuple> tuples = null;
    Set<String> paths = null;

@@ -157,7 +122,7 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    context.setSolrClientCache(cache);

    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", zkServer.getZkAddress())
        .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
        .withFunctionName("shortestPath", ShortestPathStream.class);

    Map params = new HashMap();

@@ -271,27 +236,26 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    assertTrue(paths.contains("[jim, stan, mary, steve]"));

    cache.close();
    del("*:*");
    commit();

  }

  @Test
  public void testGatherNodesStream() throws Exception {

  private void testGatherNodesStream() throws Exception {

    indexr(id, "0", "basket_s", "basket1", "product_s", "product1", "price_f", "20");
    indexr(id, "1", "basket_s", "basket1", "product_s", "product3", "price_f", "30");
    indexr(id, "2", "basket_s", "basket1", "product_s", "product5", "price_f", "1");
    indexr(id, "3", "basket_s", "basket2", "product_s", "product1", "price_f", "2");
    indexr(id, "4", "basket_s", "basket2", "product_s", "product6", "price_f", "5");
    indexr(id, "5", "basket_s", "basket2", "product_s", "product7", "price_f", "10");
    indexr(id, "6", "basket_s", "basket3", "product_s", "product4", "price_f", "20");
    indexr(id, "7", "basket_s", "basket3", "product_s", "product3", "price_f", "10");
    indexr(id, "8", "basket_s", "basket3", "product_s", "product1", "price_f", "10");
    indexr(id, "9", "basket_s", "basket4", "product_s", "product4", "price_f", "40");
    indexr(id, "10", "basket_s", "basket4", "product_s", "product3", "price_f", "10");
    indexr(id, "11", "basket_s", "basket4", "product_s", "product1", "price_f", "10");

    commit();
    new UpdateRequest()
        .add(id, "0", "basket_s", "basket1", "product_s", "product1", "price_f", "20")
        .add(id, "1", "basket_s", "basket1", "product_s", "product3", "price_f", "30")
        .add(id, "2", "basket_s", "basket1", "product_s", "product5", "price_f", "1")
        .add(id, "3", "basket_s", "basket2", "product_s", "product1", "price_f", "2")
        .add(id, "4", "basket_s", "basket2", "product_s", "product6", "price_f", "5")
        .add(id, "5", "basket_s", "basket2", "product_s", "product7", "price_f", "10")
        .add(id, "6", "basket_s", "basket3", "product_s", "product4", "price_f", "20")
        .add(id, "7", "basket_s", "basket3", "product_s", "product3", "price_f", "10")
        .add(id, "8", "basket_s", "basket3", "product_s", "product1", "price_f", "10")
        .add(id, "9", "basket_s", "basket4", "product_s", "product4", "price_f", "40")
        .add(id, "10", "basket_s", "basket4", "product_s", "product3", "price_f", "10")
        .add(id, "11", "basket_s", "basket4", "product_s", "product1", "price_f", "10")
        .commit(cluster.getSolrClient(), COLLECTION);

    List<Tuple> tuples = null;
    Set<String> paths = null;

@@ -301,7 +265,7 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    context.setSolrClientCache(cache);

    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", zkServer.getZkAddress())
        .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
        .withFunctionName("gatherNodes", GatherNodesStream.class)
        .withFunctionName("search", CloudSolrStream.class)
        .withFunctionName("count", CountMetric.class)

@@ -311,8 +275,8 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
        .withFunctionName("max", MaxMetric.class);

    String expr = "gatherNodes(collection1, " +
        "walk=\"product1->product_s\"," +
        "gather=\"basket_s\")";
        "walk=\"product1->product_s\"," +
        "gather=\"basket_s\")";

    stream = (GatherNodesStream)factory.constructStream(expr);
    stream.setStreamContext(context);

@@ -329,9 +293,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {

    //Test maxDocFreq param
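    // maxDocFreq caps how many documents a walk term may appear in: product1 sits in four
    // baskets above, so maxDocFreq="2" should exclude it while the rarer product7 is still walked.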
    String docFreqExpr = "gatherNodes(collection1, " +
        "walk=\"product1, product7->product_s\"," +
        "maxDocFreq=\"2\","+
        "gather=\"basket_s\")";
        "walk=\"product1, product7->product_s\"," +
        "maxDocFreq=\"2\","+
        "gather=\"basket_s\")";

    stream = (GatherNodesStream)factory.constructStream(docFreqExpr);
    stream.setStreamContext(context);

@@ -344,9 +308,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {


    String expr2 = "gatherNodes(collection1, " +
        expr+","+
        "walk=\"node->basket_s\"," +
        "gather=\"product_s\", count(*), avg(price_f), sum(price_f), min(price_f), max(price_f))";
        expr+","+
        "walk=\"node->basket_s\"," +
        "gather=\"product_s\", count(*), avg(price_f), sum(price_f), min(price_f), max(price_f))";

    stream = (GatherNodesStream)factory.constructStream(expr2);


@@ -383,8 +347,8 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {

    //Test list of root nodes
    expr = "gatherNodes(collection1, " +
        "walk=\"product4, product7->product_s\"," +
        "gather=\"basket_s\")";
        "walk=\"product4, product7->product_s\"," +
        "gather=\"basket_s\")";

    stream = (GatherNodesStream)factory.constructStream(expr);


@@ -401,8 +365,8 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    //Test with negative filter query

    expr = "gatherNodes(collection1, " +
        "walk=\"product4, product7->product_s\"," +
        "gather=\"basket_s\", fq=\"-basket_s:basket4\")";
        "walk=\"product4, product7->product_s\"," +
        "gather=\"basket_s\", fq=\"-basket_s:basket4\")";

    stream = (GatherNodesStream)factory.constructStream(expr);


@@ -417,20 +381,20 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    assertTrue(tuples.get(1).getString("node").equals("basket3"));

    cache.close();
    del("*:*");
    commit();

  }

  private void testGatherNodesFriendsStream() throws Exception {
  @Test
  public void testGatherNodesFriendsStream() throws Exception {

    indexr(id, "0", "from_s", "bill", "to_s", "jim", "message_t", "Hello jim");
    indexr(id, "1", "from_s", "bill", "to_s", "sam", "message_t", "Hello sam");
    indexr(id, "2", "from_s", "bill", "to_s", "max", "message_t", "Hello max");
    indexr(id, "3", "from_s", "max", "to_s", "kip", "message_t", "Hello kip");
    indexr(id, "4", "from_s", "sam", "to_s", "steve", "message_t", "Hello steve");
    indexr(id, "5", "from_s", "jim", "to_s", "ann", "message_t", "Hello steve");

    commit();
    new UpdateRequest()
        .add(id, "0", "from_s", "bill", "to_s", "jim", "message_t", "Hello jim")
        .add(id, "1", "from_s", "bill", "to_s", "sam", "message_t", "Hello sam")
        .add(id, "2", "from_s", "bill", "to_s", "max", "message_t", "Hello max")
        .add(id, "3", "from_s", "max", "to_s", "kip", "message_t", "Hello kip")
        .add(id, "4", "from_s", "sam", "to_s", "steve", "message_t", "Hello steve")
        .add(id, "5", "from_s", "jim", "to_s", "ann", "message_t", "Hello steve")
        .commit(cluster.getSolrClient(), COLLECTION);

    List<Tuple> tuples = null;
    Set<String> paths = null;

@@ -440,7 +404,7 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    context.setSolrClientCache(cache);

    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", zkServer.getZkAddress())
        .withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
        .withFunctionName("gatherNodes", GatherNodesStream.class)
        .withFunctionName("search", CloudSolrStream.class)
        .withFunctionName("count", CountMetric.class)

@@ -451,8 +415,8 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
        .withFunctionName("max", MaxMetric.class);

    String expr = "gatherNodes(collection1, " +
        "walk=\"bill->from_s\"," +
        "gather=\"to_s\")";
        "walk=\"bill->from_s\"," +
        "gather=\"to_s\")";

    stream = (GatherNodesStream)factory.constructStream(expr);
    stream.setStreamContext(context);

@@ -468,9 +432,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    //Test scatter branches, leaves and trackTraversal
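    // scatter controls which nodes are emitted ("branches" = root nodes, "leaves" = gathered nodes);
    // trackTraversal="true" additionally records each node's ancestors as it walks the graph.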
    expr = "gatherNodes(collection1, " +
        "walk=\"bill->from_s\"," +
        "gather=\"to_s\","+
        "scatter=\"branches, leaves\", trackTraversal=\"true\")";
        "walk=\"bill->from_s\"," +
        "gather=\"to_s\","+
        "scatter=\"branches, leaves\", trackTraversal=\"true\")";

    stream = (GatherNodesStream)factory.constructStream(expr);
    context = new StreamContext();

@@ -506,9 +470,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    // Test query root

    expr = "gatherNodes(collection1, " +
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";

    stream = (GatherNodesStream)factory.constructStream(expr);
    context = new StreamContext();

@@ -527,9 +491,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    // Test query root scatter branches

    expr = "gatherNodes(collection1, " +
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\")";
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\")";

    stream = (GatherNodesStream)factory.constructStream(expr);
    context = new StreamContext();

@@ -550,14 +514,14 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    assertTrue(tuples.get(3).getLong("level").equals(new Long(1)));

    expr = "gatherNodes(collection1, " +
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";

    String expr2 = "gatherNodes(collection1, " +
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\")";
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\")";

    stream = (GatherNodesStream)factory.constructStream(expr2);
    context = new StreamContext();

@@ -593,14 +557,14 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {

    expr = "gatherNodes(collection1, " +
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\")";

    expr2 = "gatherNodes(collection1, " +
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\")";
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\")";

    stream = (GatherNodesStream)factory.constructStream(expr2);
    context = new StreamContext();

@@ -628,20 +592,20 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    assertTrue(tuples.get(6).getLong("level").equals(new Long(2)));

    //Add a cycle from jim to bill
    indexr(id, "6", "from_s", "jim", "to_s", "bill", "message_t", "Hello steve");
    indexr(id, "7", "from_s", "sam", "to_s", "bill", "message_t", "Hello steve");

    commit();
    new UpdateRequest()
        .add(id, "6", "from_s", "jim", "to_s", "bill", "message_t", "Hello steve")
        .add(id, "7", "from_s", "sam", "to_s", "bill", "message_t", "Hello steve")
        .commit(cluster.getSolrClient(), COLLECTION);

    expr = "gatherNodes(collection1, " +
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\", trackTraversal=\"true\")";
        "search(collection1, q=\"message_t:jim\", fl=\"from_s\", sort=\"from_s asc\"),"+
        "walk=\"from_s->from_s\"," +
        "gather=\"to_s\", trackTraversal=\"true\")";

    expr2 = "gatherNodes(collection1, " +
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\", trackTraversal=\"true\")";
        expr+","+
        "walk=\"node->from_s\"," +
        "gather=\"to_s\", scatter=\"branches, leaves\", trackTraversal=\"true\")";

    stream = (GatherNodesStream)factory.constructStream(expr2);
    context = new StreamContext();

@@ -676,10 +640,85 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    assertTrue(tuples.get(6).getLong("level").equals(new Long(2)));

    cache.close();
    del("*:*");
    commit();

  }

  @Test
  public void testGraphHandler() throws Exception {


    new UpdateRequest()
        .add(id, "0", "from_s", "bill", "to_s", "jim", "message_t", "Hello jim")
        .add(id, "1", "from_s", "bill", "to_s", "sam", "message_t", "Hello sam")
        .add(id, "2", "from_s", "bill", "to_s", "max", "message_t", "Hello max")
        .add(id, "3", "from_s", "max", "to_s", "kip", "message_t", "Hello kip")
        .add(id, "4", "from_s", "sam", "to_s", "steve", "message_t", "Hello steve")
        .add(id, "5", "from_s", "jim", "to_s", "ann", "message_t", "Hello steve")
        .commit(cluster.getSolrClient(), COLLECTION);

    commit();

    List<JettySolrRunner> runners = cluster.getJettySolrRunners();
    JettySolrRunner runner = runners.get(0);
    String url = runner.getBaseUrl().toString();

    HttpSolrClient client = new HttpSolrClient(url);
    ModifiableSolrParams params = new ModifiableSolrParams();


    String expr = "sort(by=\"node asc\", gatherNodes(collection1, " +
        "walk=\"bill->from_s\"," +
        "trackTraversal=\"true\"," +
        "gather=\"to_s\"))";

    params.add("expr", expr);
    QueryRequest query = new QueryRequest(params);
    query.setPath("/collection1/graph");

    query.setResponseParser(new InputStreamResponseParser("xml"));
    query.setMethod(SolrRequest.METHOD.POST);

    NamedList<Object> genericResponse = client.request(query);


    InputStream stream = (InputStream)genericResponse.get("stream");
    InputStreamReader reader = new InputStreamReader(stream, "UTF-8");
    String xml = readString(reader);
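    // The expression above wraps gatherNodes in sort(by="node asc"), so the positional
    // XPath checks below see jim, max, sam in a deterministic order.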
    //Validate the nodes
    String error = h.validateXPath(xml,
        "//graph/node[1][@id ='jim']",
        "//graph/node[2][@id ='max']",
        "//graph/node[3][@id ='sam']");
    if(error != null) {
      throw new Exception(error);
    }
    //Validate the edges
    error = h.validateXPath(xml,
        "//graph/edge[1][@source ='bill']",
        "//graph/edge[1][@target ='jim']",
        "//graph/edge[2][@source ='bill']",
        "//graph/edge[2][@target ='max']",
        "//graph/edge[3][@source ='bill']",
        "//graph/edge[3][@target ='sam']");

    if(error != null) {
      throw new Exception(error);
    }

    client.close();
  }

  private String readString(InputStreamReader reader) throws Exception{
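    // Drains the response character by character; fine for the small test payloads read here.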
    StringBuilder builder = new StringBuilder();
    int c = 0;
    while((c = reader.read()) != -1) {
      builder.append(((char)c));
    }

    return builder.toString();
  }




  protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {

@@ -691,9 +730,7 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    tupleStream.close();
    return tuples;
  }
  protected boolean assertOrder(List<Tuple> tuples, int... ids) throws Exception {
    return assertOrderOf(tuples, "id", ids);
  }

  protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, int... ids) throws Exception {
    int i = 0;
    for(int val : ids) {

@@ -707,56 +744,6 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
    return true;
  }

  protected boolean assertMapOrder(List<Tuple> tuples, int... ids) throws Exception {
    int i = 0;
    for(int val : ids) {
      Tuple t = tuples.get(i);
      List<Map> tip = t.getMaps("group");
      int id = (int)tip.get(0).get("id");
      if(id != val) {
        throw new Exception("Found value:"+id+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }


  protected boolean assertFields(List<Tuple> tuples, String ... fields) throws Exception{
    for(Tuple tuple : tuples){
      for(String field : fields){
        if(!tuple.fields.containsKey(field)){
          throw new Exception(String.format(Locale.ROOT, "Expected field '%s' not found", field));
        }
      }
    }
    return true;
  }
  protected boolean assertNotFields(List<Tuple> tuples, String ... fields) throws Exception{
    for(Tuple tuple : tuples){
      for(String field : fields){
        if(tuple.fields.containsKey(field)){
          throw new Exception(String.format(Locale.ROOT, "Unexpected field '%s' found", field));
        }
      }
    }
    return true;
  }

  protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception {
    List<?> group = (List<?>)tuple.get("tuples");
    int i=0;
    for(int val : ids) {
      Map<?,?> t = (Map<?,?>)group.get(i);
      Long tip = (Long)t.get("id");
      if(tip.intValue() != val) {
        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }

  public boolean assertLong(Tuple tuple, String fieldName, long l) throws Exception {
    long lv = (long)tuple.get(fieldName);
    if(lv != l) {

@@ -775,47 +762,9 @@ public class GraphExpressionTest extends AbstractFullDistribZkTestBase {
      throw new Exception("Longs not equal:"+expected+" : "+actual);
    }

    return true;
  }

  protected boolean assertMaps(List<Map> maps, int... ids) throws Exception {
    if(maps.size() != ids.length) {
      throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size());
    }

    int i=0;
    for(int val : ids) {
      Map t = maps.get(i);
      Long tip = (Long)t.get("id");
      if(tip.intValue() != val) {
        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }

  private boolean assertList(List list, Object... vals) throws Exception {

    if(list.size() != vals.length) {
      throw new Exception("Lists are not the same size:"+list.size() +" : "+vals.length);
    }

    for(int i=0; i<list.size(); i++) {
      Object a = list.get(i);
      Object b = vals[i];
      if(!a.equals(b)) {
        throw new Exception("List items not equals:"+a+" : "+b);
      }
    }

    return true;
  }


  @Override
  protected void indexr(Object... fields) throws Exception {
    SolrInputDocument doc = getDoc(fields);
    indexDoc(doc);
  }
}
}

@@ -17,30 +17,26 @@ package org.apache.solr.client.solrj.io.graph;
 * limitations under the License.
 */

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.AfterClass;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.Set;
import java.util.HashSet;
/**
 * All base tests will be done with CloudSolrStream. Under the covers CloudSolrStream uses SolrStream so
 * SolrStream will get fully exercised through these tests.

@@ -49,86 +45,57 @@ import java.util.HashSet;

@LuceneTestCase.Slow
@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
public class GraphTest extends AbstractFullDistribZkTestBase {
public class GraphTest extends SolrCloudTestCase {

  private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
  private StreamFactory streamFactory;
  private static final String COLLECTION = "collection1";

  static {
    schemaString = "schema-streaming.xml";
  }
  private static final String id = "id";

  private static final int TIMEOUT = 30;

  @BeforeClass
  public static void beforeSuperClass() {
    AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
  }
  public static void setupCluster() throws Exception {
    configureCluster(2)
        .addConfig("conf", getFile("solrj").toPath().resolve("solr").resolve("configsets").resolve("streaming").resolve("conf"))
        .configure();

  @AfterClass
  public static void afterSuperClass() {

  }

  protected String getCloudSolrConfig() {
    return "solrconfig-streaming.xml";
  }


  @Override
  public String getSolrHome() {
    return SOLR_HOME;
  }

  public static String SOLR_HOME() {
    return SOLR_HOME;
    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
        false, true, TIMEOUT);
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // we expect this type of exception as shards go up and down...
    //ignoreException(".*");
    //System.setProperty("export.test", "true");
    System.setProperty("numShards", Integer.toString(sliceCount));
  public void cleanIndex() throws Exception {
    new UpdateRequest()
        .deleteByQuery("*:*")
        .commit(cluster.getSolrClient(), COLLECTION);
  }

  @Override
  @After
  public void tearDown() throws Exception {
    super.tearDown();
    resetExceptionIgnores();
  }
  @Test
  public void testShortestPathStream() throws Exception {

  public GraphTest() {
    super();
    sliceCount = 2;

  }

  private void testShortestPathStream() throws Exception {

    indexr(id, "0", "from_s", "jim", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "1", "from_s", "jim", "to_s", "dave", "predicate_s", "knows");
    indexr(id, "2", "from_s", "jim", "to_s", "stan", "predicate_s", "knows");
    indexr(id, "3", "from_s", "dave", "to_s", "stan", "predicate_s", "knows");
    indexr(id, "4", "from_s", "dave", "to_s", "bill", "predicate_s", "knows");
    indexr(id, "5", "from_s", "dave", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "20", "from_s", "dave", "to_s", "alex", "predicate_s", "knows");
    indexr(id, "21", "from_s", "alex", "to_s", "steve", "predicate_s", "knows");
    indexr(id, "6", "from_s", "stan", "to_s", "alice", "predicate_s", "knows");
    indexr(id, "7", "from_s", "stan", "to_s", "mary", "predicate_s", "knows");
    indexr(id, "8", "from_s", "stan", "to_s", "dave", "predicate_s", "knows");
    indexr(id, "10", "from_s", "mary", "to_s", "mike", "predicate_s", "knows");
    indexr(id, "11", "from_s", "mary", "to_s", "max", "predicate_s", "knows");
    indexr(id, "12", "from_s", "mary", "to_s", "jim", "predicate_s", "knows");
    indexr(id, "13", "from_s", "mary", "to_s", "steve", "predicate_s", "knows");

    commit();
    new UpdateRequest()
        .add(id, "0", "from_s", "jim", "to_s", "mike", "predicate_s", "knows")
        .add(id, "1", "from_s", "jim", "to_s", "dave", "predicate_s", "knows")
        .add(id, "2", "from_s", "jim", "to_s", "stan", "predicate_s", "knows")
        .add(id, "3", "from_s", "dave", "to_s", "stan", "predicate_s", "knows")
        .add(id, "4", "from_s", "dave", "to_s", "bill", "predicate_s", "knows")
        .add(id, "5", "from_s", "dave", "to_s", "mike", "predicate_s", "knows")
        .add(id, "20", "from_s", "dave", "to_s", "alex", "predicate_s", "knows")
        .add(id, "21", "from_s", "alex", "to_s", "steve", "predicate_s", "knows")
        .add(id, "6", "from_s", "stan", "to_s", "alice", "predicate_s", "knows")
        .add(id, "7", "from_s", "stan", "to_s", "mary", "predicate_s", "knows")
        .add(id, "8", "from_s", "stan", "to_s", "dave", "predicate_s", "knows")
        .add(id, "10", "from_s", "mary", "to_s", "mike", "predicate_s", "knows")
        .add(id, "11", "from_s", "mary", "to_s", "max", "predicate_s", "knows")
        .add(id, "12", "from_s", "mary", "to_s", "jim", "predicate_s", "knows")
        .add(id, "13", "from_s", "mary", "to_s", "steve", "predicate_s", "knows")
        .commit(cluster.getSolrClient(), COLLECTION);

    List<Tuple> tuples = null;
    Set<String> paths = null;
    ShortestPathStream stream = null;
    String zkHost = zkServer.getZkAddress();
    String zkHost = cluster.getZkServer().getZkAddress();
    StreamContext context = new StreamContext();
    SolrClientCache cache = new SolrClientCache();
    context.setSolrClientCache(cache);

@@ -260,40 +227,6 @@ public class GraphTest extends AbstractFullDistribZkTestBase {
    assertTrue(paths.contains("[jim, stan, mary, steve]"));

    cache.close();
    del("*:*");
    commit();
  }

  @Test
  public void streamTests() throws Exception {
    assertNotNull(cloudClient);

    handle.clear();
    handle.put("timestamp", SKIPVAL);

    waitForRecoveriesToFinish(false);

    del("*:*");

    commit();

    testShortestPathStream();

  }

  protected Map mapParams(String... vals) {
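    // Interprets the varargs as alternating key/value pairs.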
    Map params = new HashMap();
    String k = null;
    for(String val : vals) {
      if(k == null) {
        k = val;
      } else {
        params.put(k, val);
        k = null;
      }
    }

    return params;
  }

  protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {

@@ -311,58 +244,6 @@ public class GraphTest extends AbstractFullDistribZkTestBase {
    return tuples;
  }

  protected Tuple getTuple(TupleStream tupleStream) throws IOException {
    tupleStream.open();
    Tuple t = tupleStream.read();
    tupleStream.close();
    return t;
  }


  protected boolean assertOrder(List<Tuple> tuples, int... ids) throws Exception {
    int i = 0;
    for(int val : ids) {
      Tuple t = tuples.get(i);
      Long tip = (Long)t.get("id");
      if(tip.intValue() != val) {
        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }

  protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception {
    List group = (List)tuple.get("tuples");
    int i=0;
    for(int val : ids) {
      Map t = (Map)group.get(i);
      Long tip = (Long)t.get("id");
      if(tip.intValue() != val) {
        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }

  protected boolean assertMaps(List<Map> maps, int... ids) throws Exception {
    if(maps.size() != ids.length) {
      throw new Exception("Expected id count != actual map count:"+ids.length+":"+maps.size());
    }

    int i=0;
    for(int val : ids) {
      Map t = maps.get(i);
      Long tip = (Long)t.get("id");
      if(tip.intValue() != val) {
        throw new Exception("Found value:"+tip.intValue()+" expecting:"+val);
      }
      ++i;
    }
    return true;
  }

  public boolean assertLong(Tuple tuple, String fieldName, long l) throws Exception {
    long lv = (long)tuple.get(fieldName);
    if(lv != l) {

@@ -372,16 +253,5 @@ public class GraphTest extends AbstractFullDistribZkTestBase {
    return true;
  }

  @Override
  protected void indexr(Object... fields) throws Exception {
    SolrInputDocument doc = getDoc(fields);
    indexDoc(doc);
  }

  private void attachStreamFactory(TupleStream tupleStream) {
    StreamContext streamContext = new StreamContext();
    streamContext.setStreamFactory(streamFactory);
    tupleStream.setStreamContext(streamContext);
  }
}

@@ -54,6 +54,12 @@ public class JdbcDriverTest extends SolrTestCaseJ4 {
    Connection con = DriverManager.getConnection("jdbc:solr://", new Properties());
  }

  @Test(expected = SQLException.class)
  public void testConnectionStringJumbled() throws Exception {
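    // The scheme segments are transposed on purpose ("solr:jdbc" instead of "jdbc:solr"),
    // so the driver is expected to reject the URL with a SQLException.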
    final String sampleZkHost="zoo1:9983/foo";
    DriverManager.getConnection("solr:jdbc://" + sampleZkHost + "?collection=collection1", new Properties());
  }

  @Test
  public void testProcessUrl() throws Exception {
    DriverImpl driver = new DriverImpl();

@@ -16,13 +16,13 @@
 */
package org.apache.solr.client.solrj.io.sql;

import java.io.File;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;

@@ -32,11 +32,10 @@ import java.util.Properties;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.cloud.DocCollection;
import org.junit.After;
import org.junit.AfterClass;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.junit.BeforeClass;
import org.junit.Test;


@@ -47,68 +46,45 @@ import org.junit.Test;

@Slow
@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45"})
public class JdbcTest extends AbstractFullDistribZkTestBase {
public class JdbcTest extends SolrCloudTestCase {

  private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
  private static final String COLLECTION = "collection1";

  private static final String id = "id";

  static {
    schemaString = "schema-sql.xml";
  }
  private static final int TIMEOUT = 30;

  private static String zkHost;

  @BeforeClass
  public static void beforeSuperClass() {
    AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME);
  }
  public static void setupCluster() throws Exception {
    configureCluster(2)
        .addConfig("conf", getFile("solrj").toPath().resolve("solr").resolve("configsets").resolve("streaming").resolve("conf"))
        .configure();

  @AfterClass
  public static void afterSuperClass() {
    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
        false, true, TIMEOUT);

  }
    new UpdateRequest()
        .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1", "testnull_i", null)
        .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2", "testnull_i", "2")
        .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3", "testnull_i", null)
        .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4", "testnull_i", "4")
        .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5", "testnull_i", null)
        .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6", "testnull_i", "6")
        .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7", "testnull_i", null)
        .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8", "testnull_i", "8")
        .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9", "testnull_i", null)
        .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10", "testnull_i", "10")
        .commit(cluster.getSolrClient(), COLLECTION);

  protected String getCloudSolrConfig() {
    return "solrconfig-sql.xml";
  }

  @Override
  public String getSolrHome() {
    return SOLR_HOME;
  }


  @Override
  public void distribSetUp() throws Exception {
    super.distribSetUp();
  }

  @Override
  @After
  public void tearDown() throws Exception {
    super.tearDown();
    resetExceptionIgnores();
    zkHost = cluster.getZkServer().getZkAddress();
  }

  @Test
  @ShardsFixed(num = 2)
  public void doTest() throws Exception {

    waitForRecoveriesToFinish(false);

    indexr(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1", "testnull_i", null);
    indexr(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2", "testnull_i", "2");
    indexr(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3", "testnull_i", null);
    indexr(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4", "testnull_i", "4");
    indexr(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5", "testnull_i", null);
    indexr(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6", "testnull_i", "6");
    indexr(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7", "testnull_i", null);
    indexr(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8", "testnull_i", "8");
    indexr(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9", "testnull_i", null);
    indexr(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10", "testnull_i", "10");

    commit();

    String zkHost = zkServer.getZkAddress();

    Properties props = new Properties();

    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) {

@@ -202,8 +178,13 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }

  }

  @Test
  public void testFacetAggregation() throws Exception {

    //Test facet aggregation
    props = new Properties();
    Properties props = new Properties();
    props.put("aggregationMode", "facet");
    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) {
      try (Statement stmt = con.createStatement()) {

@@ -236,8 +217,13 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }

  }

  @Test
  public void testMapReduceAggregation() throws Exception {

    //Test map / reduce aggregation
    props = new Properties();
    Properties props = new Properties();
    props.put("aggregationMode", "map_reduce");
    props.put("numWorkers", "2");
    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) {

@@ -270,15 +256,20 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }
  }


  }

  @Test
  public void testConnectionParams() throws Exception {

    //Test params on the url
    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost +
    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost +
        "?collection=collection1&aggregationMode=map_reduce&numWorkers=2")) {

      Properties p = ((ConnectionImpl) con).getProperties();

      assert(p.getProperty("aggregationMode").equals("map_reduce"));
      assert(p.getProperty("numWorkers").equals("2"));
      assert (p.getProperty("aggregationMode").equals("map_reduce"));
      assert (p.getProperty("numWorkers").equals("2"));

      try (Statement stmt = con.createStatement()) {
        try (ResultSet rs = stmt.executeQuery("select a_s, sum(a_f) from collection1 group by a_s " +

@@ -310,6 +301,11 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }

  }

  @Test
  public void testJDBCUrlParameters() throws Exception {

    // Test JDBC parameters in URL
    try (Connection con = DriverManager.getConnection(
        "jdbc:solr://" + zkHost + "?collection=collection1&username=&password=&testKey1=testValue&testKey2")) {

@@ -350,6 +346,11 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }

  }

  @Test
  public void testJDBCPropertiesParameters() throws Exception {

    // Test JDBC parameters in properties
    Properties providedProperties = new Properties();
    providedProperties.put("collection", "collection1");

@@ -360,10 +361,10 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {

    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost, providedProperties)) {
      Properties p = ((ConnectionImpl) con).getProperties();
      assert(p.getProperty("username").equals(""));
      assert(p.getProperty("password").equals(""));
      assert(p.getProperty("testKey1").equals("testValue"));
      assert(p.getProperty("testKey2").equals(""));
      assert (p.getProperty("username").equals(""));
      assert (p.getProperty("password").equals(""));
      assert (p.getProperty("testKey1").equals("testValue"));
      assert (p.getProperty("testKey2").equals(""));

      try (Statement stmt = con.createStatement()) {
        try (ResultSet rs = stmt.executeQuery("select a_s, sum(a_f) from collection1 group by a_s " +

@@ -394,10 +395,12 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
        }
      }
    }
  }


  @Test
  public void testErrorPropagation() throws Exception {
    //Test error propagation
    props = new Properties();
    Properties props = new Properties();
    props.put("aggregationMode", "facet");
    try (Connection con = DriverManager.getConnection("jdbc:solr://" + zkHost + "?collection=collection1", props)) {
      try (Statement stmt = con.createStatement()) {

@@ -409,21 +412,38 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
      }
    }

    testDriverMetadata();
  }

  private void testDriverMetadata() throws Exception {
    String collection = DEFAULT_COLLECTION;
  @Test
  public void testSQLExceptionThrownWhenQueryAndConnUseDiffCollections() throws Exception {
    String badCollection = COLLECTION + "bad";
    String connectionString = "jdbc:solr://" + zkHost + "?collection=" + badCollection;
    String sql = "select id, a_i, a_s, a_f from " + badCollection + " order by a_i desc limit 2";

    String connectionString1 = "jdbc:solr://" + zkServer.getZkAddress() + "?collection=" + collection +
    //Bad connection string: wrong collection name
    try(Connection connection = DriverManager.getConnection(connectionString)) {
      try (Statement statement = connection.createStatement()) {
        try (ResultSet ignored = statement.executeQuery(sql)) {
          fail("Expected query against wrong collection to throw a SQLException.");
        }
      }
    } catch (SQLException ignore) {
      // Expected exception due to mismatched collection
    }
  }

  @Test
  public void testDriverMetadata() throws Exception {
    String collection = COLLECTION;

    String connectionString1 = "jdbc:solr://" + zkHost + "?collection=" + collection +
        "&username=&password=&testKey1=testValue&testKey2";
    Properties properties1 = new Properties();

    String sql = "select id, a_i, a_s, a_f as my_float_col, testnull_i from " + collection +
        " order by a_i desc";

    String connectionString2 = "jdbc:solr://" + zkServer.getZkAddress() + "?collection=" + collection +
    String connectionString2 = "jdbc:solr://" + zkHost + "?collection=" + collection +
        "&aggregationMode=map_reduce&numWorkers=2&username=&password=&testKey1=testValue&testKey2";
    Properties properties2 = new Properties();


@@ -439,9 +459,9 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {
    try (Connection con = DriverManager.getConnection(connectionString, properties)) {
      assertTrue(con.isValid(DEFAULT_CONNECTION_TIMEOUT));

      assertEquals(zkServer.getZkAddress(), con.getCatalog());
      con.setCatalog(zkServer.getZkAddress());
      assertEquals(zkServer.getZkAddress(), con.getCatalog());
      assertEquals(zkHost, con.getCatalog());
      con.setCatalog(zkHost);
      assertEquals(zkHost, con.getCatalog());

      assertEquals(null, con.getSchema());
      con.setSchema("myschema");

@@ -470,22 +490,22 @@ public class JdbcTest extends AbstractFullDistribZkTestBase {

      try(ResultSet rs = databaseMetaData.getCatalogs()) {
        assertTrue(rs.next());
        assertEquals(zkServer.getZkAddress(), rs.getString("TABLE_CAT"));
        assertEquals(zkHost, rs.getString("TABLE_CAT"));
        assertFalse(rs.next());
      }

      List<String> collections = new ArrayList<>();
      collections.addAll(cloudClient.getZkStateReader().getClusterState().getCollections());
      collections.addAll(cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionsMap().keySet());
      Collections.sort(collections);

      try(ResultSet rs = databaseMetaData.getSchemas()) {
        assertFalse(rs.next());
      }

      try(ResultSet rs = databaseMetaData.getTables(zkServer.getZkAddress(), null, "%", null)) {
      try(ResultSet rs = databaseMetaData.getTables(zkHost, null, "%", null)) {
        for(String acollection : collections) {
          assertTrue(rs.next());
          assertEquals(zkServer.getZkAddress(), rs.getString("TABLE_CAT"));
          assertEquals(zkHost, rs.getString("TABLE_CAT"));
          assertNull(rs.getString("TABLE_SCHEM"));
          assertEquals(acollection, rs.getString("TABLE_NAME"));
          assertEquals("TABLE", rs.getString("TABLE_TYPE"));

@@ -16,7 +16,6 @@
 */
package org.apache.solr.client.solrj.io.stream;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;

@@ -26,10 +25,8 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Properties;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
import org.apache.solr.client.solrj.io.comp.FieldComparator;

@@ -38,10 +35,10 @@ import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

@@ -50,19 +47,28 @@ import org.junit.Test;
/**
 */

@Slow
@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
public class JDBCStreamTest extends SolrCloudTestCase {

  private static final String SOLR_HOME = getFile("solrj" + File.separator + "solr").getAbsolutePath();
  private static final String COLLECTION = "jdbc";

  static {
    schemaString = "schema-streaming.xml";
  private static final int TIMEOUT = 30;

  private static final String id = "id";

  @BeforeClass
  public static void setupCluster() throws Exception {
    configureCluster(4)
        .addConfig("conf", getFile("solrj").toPath().resolve("solr").resolve("configsets").resolve("streaming").resolve("conf"))
        .configure();

    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient());
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
        false, true, TIMEOUT);
  }

  @BeforeClass
  public static void beforeSuperClass() throws Exception {
    AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
  public static void setupDatabase() throws Exception {

    // Initialize Database
    // Ok, so.....hsqldb is doing something totally weird so I thought I'd take a moment to explain it.

@@ -74,8 +80,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
    // JDBCStream and is only a carryover from the driver we are testing with.
    Class.forName("org.hsqldb.jdbcDriver").newInstance();
    Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
|
||||
Statement statement = connection.createStatement();
|
||||
statement = connection.createStatement();
|
||||
Statement statement = connection.createStatement();
|
||||
statement.executeUpdate("create table COUNTRIES(CODE varchar(3) not null primary key, COUNTRY_NAME varchar(50), DELETED char(1) default 'N')");
|
||||
statement.executeUpdate("create table PEOPLE(ID int not null primary key, NAME varchar(50), COUNTRY_CODE char(2), DELETED char(1) default 'N')");
|
||||
statement.executeUpdate("create table PEOPLE_SPORTS(ID int not null primary key, PERSON_ID int, SPORT_NAME varchar(50), DELETED char(1) default 'N')");
|
||||
|
@ -83,107 +88,48 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
|
|||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterSuperClass() throws SQLException {
|
||||
public static void teardownDatabase() throws SQLException {
|
||||
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
|
||||
Statement statement = connection.createStatement();
|
||||
statement.executeUpdate("shutdown");
}

protected String getCloudSolrConfig() {
return "solrconfig-streaming.xml";
}

@Override
public String getSolrHome() {
return SOLR_HOME;
}

public static String SOLR_HOME() {
return SOLR_HOME;
@Before
public void cleanIndex() throws Exception {
new UpdateRequest()
.deleteByQuery("*:*")
.commit(cluster.getSolrClient(), COLLECTION);
}

@Before
@Override
public void setUp() throws Exception {
super.setUp();
// we expect this type of exception as shards go up and down...
//ignoreException(".*");

System.setProperty("numShards", Integer.toString(sliceCount));
}

@Override
@After
public void tearDown() throws Exception {
super.tearDown();
resetExceptionIgnores();
}

public JDBCStreamTest() {
super();
sliceCount = 2;
public void cleanDatabase() throws Exception {
// Clear database
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("delete from COUNTRIES WHERE 1=1");
statement.executeUpdate("delete from PEOPLE WHERE 1=1");
statement.executeUpdate("delete from PEOPLE_SPORTS WHERE 1=1");
}
}

@Test
public void testAll() throws Exception{
assertNotNull(cloudClient);
public void testJDBCSelect() throws Exception {

handle.clear();
handle.put("timestamp", SKIPVAL);

waitForRecoveriesToFinish(false);

// Run JDBC Only tests
testJDBCSelect();
testJDBCJoin();

// Run JDBC + Solr tests
testJDBCSolrMerge();
testJDBCSolrInnerJoinExpression();
testJDBCSolrInnerJoinRollupExpression();
testJDBCSolrInnerJoinExpressionWithProperties();

// Clear all data
clearData();

// Delete database
// done during afterSuperClass(...)
}

private void clearData() throws Exception {
// Clear Solr index
del("*:*");
commit();

// Clear database
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("delete from COUNTRIES WHERE 1=1");
statement.executeUpdate("delete from PEOPLE WHERE 1=1");
statement.executeUpdate("delete from PEOPLE_SPORTS WHERE 1=1");
statement.close();
connection.close();
}

private void testJDBCSelect() throws Exception {
clearData();

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
}

TupleStream stream;
List<Tuple> tuples;

// Simple 1
stream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING));
stream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE",
new FieldComparator("CODE", ComparatorOrder.ASCENDING));
tuples = getTuples(stream);
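// JDBCStream does not sort; the FieldComparator only declares the order the SQL
// query already produces, so it must agree with the ORDER BY clause for downstream
// stream decorators that rely on the declared sort to work correctly.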

assert(tuples.size() == 4);
@@ -191,7 +137,8 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "COUNTRY_NAME", "Netherlands", "Norway", "Nepal", "United States");

// Simple 2
stream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by COUNTRY_NAME", new FieldComparator("COUNTRY_NAME", ComparatorOrder.ASCENDING));
stream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by COUNTRY_NAME",
new FieldComparator("COUNTRY_NAME", ComparatorOrder.ASCENDING));
tuples = getTuples(stream);

assertEquals(4, tuples.size());
@@ -199,29 +146,28 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "COUNTRY_NAME", "Nepal", "Netherlands", "Norway", "United States");

}

private void testJDBCJoin() throws Exception {
clearData();

@Test
public void testJDBCJoin() throws Exception {

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','NI')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NG')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NF')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NE')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','NC')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NZ')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','NR')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','NI')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NG')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NF')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NE')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','NC')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NZ')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','NR')");
}

TupleStream stream;
List<Tuple> tuples;
@@ -234,28 +180,28 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "ID", 11, 17, 19);
assertOrderOf(tuples, "NAME", "Emma", "Mia", "Olivia");
}

private void testJDBCSolrMerge() throws Exception {
clearData();

@Test
public void testJDBCSolrMerge() throws Exception {

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('AL', 'Algeria')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('AL', 'Algeria')");
}

// Load Solr
indexr(id, "0", "code_s", "GB", "name_s", "Great Britian");
indexr(id, "1", "code_s", "CA", "name_s", "Canada");
commit();
new UpdateRequest()
.add(id, "0", "code_s", "GB", "name_s", "Great Britian")
.add(id, "1", "code_s", "CA", "name_s", "Canada")
.commit(cluster.getSolrClient(), COLLECTION);

StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", zkServer.getZkAddress())
.withCollectionZkHost(COLLECTION, cluster.getZkServer().getZkAddress())
.withFunctionName("search", CloudSolrStream.class);

List<Tuple> tuples;
@@ -263,7 +209,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
// Simple 1
TupleStream jdbcStream = new JDBCStream("jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING));
TupleStream selectStream = new SelectStream(jdbcStream, new HashMap<String, String>(){{ put("CODE", "code_s"); put("COUNTRY_NAME", "name_s"); }});
TupleStream searchStream = factory.constructStream("search(collection1, fl=\"code_s,name_s\",q=\"*:*\",sort=\"code_s asc\")");
TupleStream searchStream = factory.constructStream("search(" + COLLECTION + ", fl=\"code_s,name_s\",q=\"*:*\",sort=\"code_s asc\")");
TupleStream mergeStream = new MergeStream(new FieldComparator("code_s", ComparatorOrder.ASCENDING), new TupleStream[]{selectStream,searchStream});
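// MergeStream performs an ordered merge of its underlying streams, like the merge
// step of a merge sort: both inputs must already be sorted by the comparator given
// here for the combined stream to come out in order.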

tuples = getTuples(mergeStream);
@@ -272,49 +218,49 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "code_s", "AL","CA","GB","NL","NO","NP","US");
assertOrderOf(tuples, "name_s", "Algeria", "Canada", "Great Britian", "Netherlands", "Norway", "Nepal", "United States");
}

private void testJDBCSolrInnerJoinExpression() throws Exception{
clearData();

@Test
public void testJDBCSolrInnerJoinExpression() throws Exception{

StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", zkServer.getZkAddress())
.withCollectionZkHost(COLLECTION, cluster.getZkServer().getZkAddress())
.withFunctionName("search", CloudSolrStream.class)
.withFunctionName("select", SelectStream.class)
.withFunctionName("innerJoin", InnerJoinStream.class)
.withFunctionName("jdbc", JDBCStream.class);

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
}

// Load solr data
indexr(id, "1", "rating_f", "3.5", "personId_i", "11");
indexr(id, "2", "rating_f", "5", "personId_i", "12");
indexr(id, "3", "rating_f", "2.2", "personId_i", "13");
indexr(id, "4", "rating_f", "4.3", "personId_i", "14");
indexr(id, "5", "rating_f", "3.5", "personId_i", "15");
indexr(id, "6", "rating_f", "3", "personId_i", "16");
indexr(id, "7", "rating_f", "3", "personId_i", "17");
indexr(id, "8", "rating_f", "4", "personId_i", "18");
indexr(id, "9", "rating_f", "4.1", "personId_i", "19");
indexr(id, "10", "rating_f", "4.8", "personId_i", "20");
commit();
new UpdateRequest()
.add(id, "1", "rating_f", "3.5", "personId_i", "11")
.add(id, "2", "rating_f", "5", "personId_i", "12")
.add(id, "3", "rating_f", "2.2", "personId_i", "13")
.add(id, "4", "rating_f", "4.3", "personId_i", "14")
.add(id, "5", "rating_f", "3.5", "personId_i", "15")
.add(id, "6", "rating_f", "3", "personId_i", "16")
.add(id, "7", "rating_f", "3", "personId_i", "17")
.add(id, "8", "rating_f", "4", "personId_i", "18")
.add(id, "9", "rating_f", "4.1", "personId_i", "19")
.add(id, "10", "rating_f", "4.8", "personId_i", "20")
.commit(cluster.getSolrClient(), COLLECTION);

String expression;
TupleStream stream;
@@ -324,7 +270,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
expression =
"innerJoin("
+ " select("
+ " search(collection1, fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " search(" + COLLECTION + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " personId_i as personId,"
+ " rating_f as rating"
+ " ),"
@@ -347,48 +293,48 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "country", "Netherlands","United States","Netherlands","Netherlands","Netherlands","United States","United States","Netherlands","Netherlands","United States");
}

private void testJDBCSolrInnerJoinExpressionWithProperties() throws Exception{
clearData();
@Test
public void testJDBCSolrInnerJoinExpressionWithProperties() throws Exception{

StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", zkServer.getZkAddress())
.withCollectionZkHost(COLLECTION, cluster.getZkServer().getZkAddress())
.withFunctionName("search", CloudSolrStream.class)
.withFunctionName("select", SelectStream.class)
.withFunctionName("innerJoin", InnerJoinStream.class)
.withFunctionName("jdbc", JDBCStream.class);

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
}

// Load solr data
indexr(id, "1", "rating_f", "3.5", "personId_i", "11");
indexr(id, "2", "rating_f", "5", "personId_i", "12");
indexr(id, "3", "rating_f", "2.2", "personId_i", "13");
indexr(id, "4", "rating_f", "4.3", "personId_i", "14");
indexr(id, "5", "rating_f", "3.5", "personId_i", "15");
indexr(id, "6", "rating_f", "3", "personId_i", "16");
indexr(id, "7", "rating_f", "3", "personId_i", "17");
indexr(id, "8", "rating_f", "4", "personId_i", "18");
indexr(id, "9", "rating_f", "4.1", "personId_i", "19");
indexr(id, "10", "rating_f", "4.8", "personId_i", "20");
commit();
new UpdateRequest()
.add(id, "1", "rating_f", "3.5", "personId_i", "11")
.add(id, "2", "rating_f", "5", "personId_i", "12")
.add(id, "3", "rating_f", "2.2", "personId_i", "13")
.add(id, "4", "rating_f", "4.3", "personId_i", "14")
.add(id, "5", "rating_f", "3.5", "personId_i", "15")
.add(id, "6", "rating_f", "3", "personId_i", "16")
.add(id, "7", "rating_f", "3", "personId_i", "17")
.add(id, "8", "rating_f", "4", "personId_i", "18")
.add(id, "9", "rating_f", "4.1", "personId_i", "19")
.add(id, "10", "rating_f", "4.8", "personId_i", "20")
.commit(cluster.getSolrClient(), COLLECTION);

String expression;
TupleStream stream;
@@ -401,7 +347,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
expression =
"innerJoin("
+ " select("
+ " search(collection1, fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " search(" + COLLECTION + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " personId_i as personId,"
+ " rating_f as rating"
+ " ),"
@@ -430,7 +376,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
expression =
"innerJoin("
+ " select("
+ " search(collection1, fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " search(" + COLLECTION + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " personId_i as personId,"
+ " rating_f as rating"
+ " ),"
@@ -453,12 +399,11 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
assertOrderOf(tuples, "country", "Netherlands","United States","Netherlands","Netherlands","Netherlands","United States","United States","Netherlands","Netherlands","United States");
}

private void testJDBCSolrInnerJoinRollupExpression() throws Exception{
clearData();
@Test
public void testJDBCSolrInnerJoinRollupExpression() throws Exception{

StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", zkServer.getZkAddress())
.withCollectionZkHost(COLLECTION, cluster.getZkServer().getZkAddress())
.withFunctionName("search", CloudSolrStream.class)
.withFunctionName("select", SelectStream.class)
.withFunctionName("hashJoin", HashJoinStream.class)
@@ -471,38 +416,37 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
;
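// Unlike innerJoin, which merge-joins two streams sorted on the join key, hashJoin
// reads the "hashed" stream fully into an in-memory hash table and probes it with
// tuples from the other stream, so the probe side need not be sorted on the join key.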

// Load Database Data
Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement();
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
statement.close();
connection.close();
try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:.");
Statement statement = connection.createStatement()) {
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')");
statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')");
statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')");
}

// Load solr data
indexr(id, "1", "rating_f", "3.5", "personId_i", "11");
indexr(id, "3", "rating_f", "2.2", "personId_i", "13");
indexr(id, "4", "rating_f", "4.3", "personId_i", "14");
indexr(id, "5", "rating_f", "3.5", "personId_i", "15");
indexr(id, "8", "rating_f", "4", "personId_i", "18");
indexr(id, "9", "rating_f", "4.1", "personId_i", "19");

indexr(id, "2", "rating_f", "5", "personId_i", "12");
indexr(id, "6", "rating_f", "3", "personId_i", "16");
indexr(id, "7", "rating_f", "3", "personId_i", "17");
indexr(id, "10", "rating_f", "4.8", "personId_i", "20");
commit();
new UpdateRequest()
.add(id, "1", "rating_f", "3.5", "personId_i", "11")
.add(id, "3", "rating_f", "2.2", "personId_i", "13")
.add(id, "4", "rating_f", "4.3", "personId_i", "14")
.add(id, "5", "rating_f", "3.5", "personId_i", "15")
.add(id, "8", "rating_f", "4", "personId_i", "18")
.add(id, "9", "rating_f", "4.1", "personId_i", "19")
.add(id, "2", "rating_f", "5", "personId_i", "12")
.add(id, "6", "rating_f", "3", "personId_i", "16")
.add(id, "7", "rating_f", "3", "personId_i", "17")
.add(id, "10", "rating_f", "4.8", "personId_i", "20")
.commit(cluster.getSolrClient(), COLLECTION);

String expression;
TupleStream stream;
@@ -513,7 +457,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
"rollup("
+ " hashJoin("
+ " hashed=select("
+ " search(collection1, fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " search(" + COLLECTION + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\"),"
+ " personId_i as personId,"
+ " rating_f as rating"
+ " ),"
@@ -562,6 +506,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
tupleStream.close();
return tuples;
}

protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, int... values) throws Exception {
int i = 0;
for(int val : values) {
@@ -574,6 +519,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
}
return true;
}

protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, double... values) throws Exception {
int i = 0;
for(double val : values) {
@@ -586,6 +532,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
}
return true;
}

protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, String... values) throws Exception {
int i = 0;
for(String val : values) {
@@ -617,6 +564,7 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
}
return true;
}

protected boolean assertNotFields(List<Tuple> tuples, String ... fields) throws Exception{
for(Tuple tuple : tuples){
for(String field : fields){
@@ -649,9 +597,4 @@ public class JDBCStreamTest extends AbstractFullDistribZkTestBase {
return true;
}

@Override
protected void indexr(Object... fields) throws Exception {
SolrInputDocument doc = getDoc(fields);
indexDoc(doc);
}
}
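In case it helps to see the pattern these tests exercise in isolation, here is a minimal, self-contained sketch of reading a JDBCStream the way the getTuples(...) helper above does. It assumes the HSQLDB driver is on the classpath and that a COUNTRIES table like the one created in setupDatabase() already exists; the class name JDBCStreamSketch is purely illustrative.

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.ComparatorOrder;
import org.apache.solr.client.solrj.io.comp.FieldComparator;
import org.apache.solr.client.solrj.io.stream.JDBCStream;
import org.apache.solr.client.solrj.io.stream.TupleStream;

public class JDBCStreamSketch {
  public static void main(String[] args) throws Exception {
    // Declare the sort to match the ORDER BY so downstream decorators can trust it.
    TupleStream stream = new JDBCStream("jdbc:hsqldb:mem:.",
        "select CODE,COUNTRY_NAME from COUNTRIES order by CODE",
        new FieldComparator("CODE", ComparatorOrder.ASCENDING));
    List<Tuple> tuples = new ArrayList<>();
    try {
      stream.open();
      // Read until the sentinel EOF tuple, mirroring the getTuples(...) helper.
      for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
        tuples.add(t);
      }
    } finally {
      stream.close();
    }
    System.out.println("read " + tuples.size() + " tuples");
  }
}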
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1353,7 +1353,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes

if (slices == null) {
throw new RuntimeException("Could not find collection "
+ DEFAULT_COLLECTION + " in " + clusterState.getCollections());
+ DEFAULT_COLLECTION + " in " + clusterState.getCollectionsMap().keySet());
}

for (CloudJettyRunner cjetty : cloudJettys) {
@@ -1916,9 +1916,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
if (collection != null) {
cs = clusterState.getCollection(collection).toString();
} else {
Map<String,DocCollection> map = new HashMap<>();
for (String coll : clusterState.getCollections())
map.put(coll, clusterState.getCollection(coll));
Map<String,DocCollection> map = clusterState.getCollectionsMap();
CharArr out = new CharArr();
new JSONWriter(out, 2).write(map);
cs = out.toString();

@@ -17,6 +17,7 @@
package org.apache.solr.util;

import java.io.File;
import java.util.Random;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
@@ -43,6 +44,8 @@ import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpClientUtil.SchemaRegistryProvider;
import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;

import org.apache.lucene.util.Constants;

import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.util.security.CertificateUtils;
import org.eclipse.jetty.util.ssl.SslContextFactory;
@@ -266,33 +269,87 @@ public class SSLTestConfig extends SSLConfig {
* Tests do not need secure SSL.
*/
private static class NullSecureRandom extends SecureRandom {
public static final SecureRandom INSTANCE = new NullSecureRandom();

/**
* The one and only instance that should be used; the specific impl may vary based on platform
* @see Constants#SUN_OS
* @see <a href="https://issues.apache.org/jira/browse/SOLR-9068">SOLR-9068</a>
*/
public static final SecureRandom INSTANCE = Constants.SUN_OS
? new NullSecureRandom(NullSecureRandomSpi.PSUEDO_RAND_INSTANCE)
: new NullSecureRandom(NullSecureRandomSpi.NULL_INSTANCE);

/** A source of pseudo-random data if needed */
private static final Random RAND = new Random(42);
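// Using a fixed seed means the "random" bytes are identical on every run, keeping
// SSL test behavior deterministic while still giving the SSLEngine non-constant data.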

/** SPI Used to init all instances */
private static final SecureRandomSpi NULL_SPI = new SecureRandomSpi() {
/** NOOP: returns new uninitialized byte[] */
/** SPI base class for all NullSecureRandom instances */
private static class NullSecureRandomSpi extends SecureRandomSpi {
private NullSecureRandomSpi() {
/* NOOP */
}
/**
* Helper method that can be used to fill an array with non-zero data.
* Default impl is No-Op
*/
public byte[] fillData(byte[] data) {
return data; /* NOOP */
}
/** returns a new byte[] filled with static data */
@Override
public byte[] engineGenerateSeed(int numBytes) {
return new byte[numBytes];
return fillData(new byte[numBytes]);
}
/** fills the byte[] with static data */
@Override
public void engineNextBytes(byte[] bytes) {
fillData(bytes);
}
/** NOOP */
public void engineNextBytes(byte[] bytes) { /* NOOP */ }
/** NOOP */
@Override
public void engineSetSeed(byte[] seed) { /* NOOP */ }
};

private NullSecureRandom() {
super(NULL_SPI, null);

/** Instance to use on platforms w/SSLEngines that work fine when SecureRandom returns constant bytes */
public static final NullSecureRandomSpi NULL_INSTANCE = new NullSecureRandomSpi();

/**
* Instance to use on platforms that need at least pseudo-random data for the SSLEngine to not break
* (Attempted workaround of Solaris SSL Padding bug: SOLR-9068)
*/
public static final NullSecureRandomSpi PSUEDO_RAND_INSTANCE = new NullSecureRandomSpi() {
/**
* Fill with pseudo-random data.
* (Attempted workaround of Solaris SSL Padding bug: SOLR-9068)
*/
@Override
public byte[] fillData(byte[] data) {
RAND.nextBytes(data);
return data;
}
};
}

/** NOOP: returns new uninitialized byte[] */
private NullSecureRandom(NullSecureRandomSpi spi) {
super(spi, null);
this.spi = spi;
}

private NullSecureRandomSpi spi;

/** fills a new byte[] with data from SPI */
@Override
public byte[] generateSeed(int numBytes) {
return new byte[numBytes];
return spi.fillData(new byte[numBytes]);
}
/** fills the byte[] with data from SPI */
@Override
synchronized public void nextBytes(byte[] bytes) {
spi.fillData(bytes);
}
/** NOOP */
synchronized public void nextBytes(byte[] bytes) { /* NOOP */ }
/** NOOP */
@Override
synchronized public void setSeed(byte[] seed) { /* NOOP */ }
/** NOOP */
@Override
synchronized public void setSeed(long seed) { /* NOOP */ }

}
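For context on where a stub SecureRandom like this plugs in: javax.net.ssl.SSLContext accepts an arbitrary SecureRandom at init time, which is the kind of hook a test SSL config can use to swap in a cheap, non-blocking source. A minimal sketch under that assumption follows; the default SecureRandom below is only a stand-in for the NullSecureRandom.INSTANCE defined above, which is private to SSLTestConfig.

import java.security.SecureRandom;
import javax.net.ssl.SSLContext;

public class NullRandomSketch {
  public static void main(String[] args) throws Exception {
    // Any SecureRandom may be handed to an SSLContext; a no-op or seeded one
    // trades security (irrelevant in tests) for speed and no entropy blocking.
    SecureRandom notSecure = new SecureRandom(); // stand-in for NullSecureRandom.INSTANCE
    SSLContext ctx = SSLContext.getInstance("TLS");
    ctx.init(null, null, notSecure); // null managers fall back to JSSE defaults
    System.out.println(ctx.getProtocol());
  }
}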