mirror of https://github.com/apache/lucene.git
Merge remote-tracking branch 'origin/master'
This commit is contained in:
commit
7b89d0f698
|
@ -48,9 +48,6 @@ Optimizations
|
|||
* LUCENE-7105, LUCENE-7215: Optimize LatLonPoint's newDistanceQuery.
|
||||
(Robert Muir)
|
||||
|
||||
* LUCENE-7109: LatLonPoint's newPolygonQuery supports two-phase
|
||||
iteration. (Robert Muir)
|
||||
|
||||
* LUCENE-7097: IntroSorter now recurses to 2 * log_2(count) quicksort
|
||||
stack depth before switching to heapsort (Adrien Grand, Mike McCandless)
|
||||
|
||||
|
@ -64,8 +61,8 @@ Optimizations
|
|||
multiple polygons and holes, with memory usage independent of
|
||||
polygon complexity. (Karl Wright, Mike McCandless, Robert Muir)
|
||||
|
||||
* LUCENE-7159, LUCENE-7222, LUCENE-7229: Speed up LatLonPoint polygon performance for complex
|
||||
polygons. (Robert Muir)
|
||||
* LUCENE-7159, LUCENE-7222, LUCENE-7229, LUCENE-7239: Speed up LatLonPoint
|
||||
polygon performance. (Robert Muir)
|
||||
|
||||
* LUCENE-7211: Reduce memory & GC for spatial RPT Intersects when the number of
|
||||
matching docs is small. (Jeff Wartes, David Smiley)
|
||||
|
|
|
@ -116,7 +116,9 @@ public final class BitUtil {
|
|||
*
|
||||
* Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
|
||||
*/
|
||||
public static long interleave(long v1, long v2) {
|
||||
public static long interleave(int even, int odd) {
|
||||
long v1 = 0x00000000FFFFFFFFL & even;
|
||||
long v2 = 0x00000000FFFFFFFFL & odd;
|
||||
v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
|
||||
v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
|
||||
v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
|
||||
|
|
|
@ -148,6 +148,7 @@ public class ReplicationService {
|
|||
throw new ServletException("unrecognized shard ID " + pathElements[SHARD_IDX]);
|
||||
}
|
||||
|
||||
// SOLR-8933 Don't close this stream.
|
||||
ServletOutputStream resOut = resp.getOutputStream();
|
||||
try {
|
||||
switch (action) {
|
||||
|
|
|
@ -60,14 +60,14 @@ final class LatLonGrid {
|
|||
final long latPerCell;
|
||||
final long lonPerCell;
|
||||
|
||||
final Polygon[] polygons;
|
||||
final LatLonTree[] tree;
|
||||
|
||||
LatLonGrid(int minLat, int maxLat, int minLon, int maxLon, Polygon... polygons) {
|
||||
this.minLat = minLat;
|
||||
this.maxLat = maxLat;
|
||||
this.minLon = minLon;
|
||||
this.maxLon = maxLon;
|
||||
this.polygons = polygons;
|
||||
this.tree = LatLonTree.build(polygons);
|
||||
if (minLon > maxLon) {
|
||||
// maybe make 2 grids if you want this?
|
||||
throw new IllegalArgumentException("Grid cannot cross the dateline");
|
||||
|
@ -88,12 +88,12 @@ final class LatLonGrid {
|
|||
// but it prevents edge case bugs.
|
||||
latPerCell = latitudeRange / (GRID_SIZE - 1);
|
||||
lonPerCell = longitudeRange / (GRID_SIZE - 1);
|
||||
fill(polygons, 0, GRID_SIZE, 0, GRID_SIZE);
|
||||
fill(0, GRID_SIZE, 0, GRID_SIZE);
|
||||
}
|
||||
}
|
||||
|
||||
/** fills a 2D range of grid cells [minLatIndex .. maxLatIndex) X [minLonIndex .. maxLonIndex) */
|
||||
void fill(Polygon[] polygons, int minLatIndex, int maxLatIndex, int minLonIndex, int maxLonIndex) {
|
||||
void fill(int minLatIndex, int maxLatIndex, int minLonIndex, int maxLonIndex) {
|
||||
// grid cells at the edge of the bounding box are typically smaller than normal, because we spill over.
|
||||
long cellMinLat = minLat + (minLatIndex * latPerCell);
|
||||
long cellMaxLat = Math.min(maxLat, minLat + (maxLatIndex * latPerCell) - 1);
|
||||
|
@ -104,7 +104,7 @@ final class LatLonGrid {
|
|||
assert cellMaxLat >= cellMinLat;
|
||||
assert cellMaxLon >= cellMinLon;
|
||||
|
||||
Relation relation = Polygon.relate(polygons, decodeLatitude((int) cellMinLat),
|
||||
Relation relation = LatLonTree.relate(tree, decodeLatitude((int) cellMinLat),
|
||||
decodeLatitude((int) cellMaxLat),
|
||||
decodeLongitude((int) cellMinLon),
|
||||
decodeLongitude((int) cellMaxLon));
|
||||
|
@ -127,10 +127,10 @@ final class LatLonGrid {
|
|||
// grid range crosses our polygon, keep recursing.
|
||||
int midLatIndex = (minLatIndex + maxLatIndex) >>> 1;
|
||||
int midLonIndex = (minLonIndex + maxLonIndex) >>> 1;
|
||||
fill(polygons, minLatIndex, midLatIndex, minLonIndex, midLonIndex);
|
||||
fill(polygons, minLatIndex, midLatIndex, midLonIndex, maxLonIndex);
|
||||
fill(polygons, midLatIndex, maxLatIndex, minLonIndex, midLonIndex);
|
||||
fill(polygons, midLatIndex, maxLatIndex, midLonIndex, maxLonIndex);
|
||||
fill(minLatIndex, midLatIndex, minLonIndex, midLonIndex);
|
||||
fill(minLatIndex, midLatIndex, midLonIndex, maxLonIndex);
|
||||
fill(midLatIndex, maxLatIndex, minLonIndex, midLonIndex);
|
||||
fill(midLatIndex, maxLatIndex, midLonIndex, maxLonIndex);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,7 +147,7 @@ final class LatLonGrid {
|
|||
// the grid is unsure (boundary): do a real test.
|
||||
double docLatitude = decodeLatitude(latitude);
|
||||
double docLongitude = decodeLongitude(longitude);
|
||||
return Polygon.contains(polygons, docLatitude, docLongitude);
|
||||
return LatLonTree.contains(tree, docLatitude, docLongitude);
|
||||
}
|
||||
|
||||
/** Returns grid index of lat/lon, or -1 if the value is outside of the bounding box. */
|
||||
|
|
|
@ -29,19 +29,13 @@ import org.apache.lucene.search.DocIdSetIterator;
|
|||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.apache.lucene.search.TwoPhaseIterator;
|
||||
import org.apache.lucene.search.Weight;
|
||||
import org.apache.lucene.index.PointValues;
|
||||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.apache.lucene.index.DocValues;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.apache.lucene.util.DocIdSetBuilder;
|
||||
import org.apache.lucene.util.FixedBitSet;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.apache.lucene.util.SparseFixedBitSet;
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
|
||||
|
@ -98,13 +92,6 @@ final class LatLonPointInPolygonQuery extends Query {
|
|||
NumericUtils.intToSortableBytes(encodeLongitude(box.minLon), minLon, 0);
|
||||
NumericUtils.intToSortableBytes(encodeLongitude(box.maxLon), maxLon, 0);
|
||||
|
||||
// TODO: make this fancier, but currently linear with number of vertices
|
||||
float cumulativeCost = 0;
|
||||
for (Polygon polygon : polygons) {
|
||||
cumulativeCost += 20 * (polygon.getPolyLats().length + polygon.getHoles().length);
|
||||
}
|
||||
final float matchCost = cumulativeCost;
|
||||
|
||||
final LatLonGrid grid = new LatLonGrid(encodeLatitude(box.minLat),
|
||||
encodeLatitude(box.maxLat),
|
||||
encodeLongitude(box.minLon),
|
||||
|
@ -127,22 +114,14 @@ final class LatLonPointInPolygonQuery extends Query {
|
|||
}
|
||||
LatLonPoint.checkCompatible(fieldInfo);
|
||||
|
||||
// approximation (postfiltering has not yet been applied)
|
||||
// matching docids
|
||||
DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc());
|
||||
// subset of documents that need no postfiltering, this is purely an optimization
|
||||
final BitSet preApproved;
|
||||
// dumb heuristic: if the field is really sparse, use a sparse impl
|
||||
if (values.getDocCount(field) * 100L < reader.maxDoc()) {
|
||||
preApproved = new SparseFixedBitSet(reader.maxDoc());
|
||||
} else {
|
||||
preApproved = new FixedBitSet(reader.maxDoc());
|
||||
}
|
||||
|
||||
values.intersect(field,
|
||||
new IntersectVisitor() {
|
||||
@Override
|
||||
public void visit(int docID) {
|
||||
result.add(docID);
|
||||
preApproved.set(docID);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -156,8 +135,11 @@ final class LatLonPointInPolygonQuery extends Query {
|
|||
// outside of global bounding box range
|
||||
return;
|
||||
}
|
||||
if (grid.contains(NumericUtils.sortableBytesToInt(packedValue, 0),
|
||||
NumericUtils.sortableBytesToInt(packedValue, Integer.BYTES))) {
|
||||
result.add(docID);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
|
||||
|
@ -184,36 +166,7 @@ final class LatLonPointInPolygonQuery extends Query {
|
|||
return null;
|
||||
}
|
||||
|
||||
// return two-phase iterator using docvalues to postfilter candidates
|
||||
SortedNumericDocValues docValues = DocValues.getSortedNumeric(reader, field);
|
||||
|
||||
TwoPhaseIterator iterator = new TwoPhaseIterator(disi) {
|
||||
@Override
|
||||
public boolean matches() throws IOException {
|
||||
int docId = disi.docID();
|
||||
if (preApproved.get(docId)) {
|
||||
return true;
|
||||
} else {
|
||||
docValues.setDocument(docId);
|
||||
int count = docValues.count();
|
||||
for (int i = 0; i < count; i++) {
|
||||
long encoded = docValues.valueAt(i);
|
||||
int latitudeBits = (int)(encoded >> 32);
|
||||
int longitudeBits = (int)(encoded & 0xFFFFFFFF);
|
||||
if (grid.contains(latitudeBits, longitudeBits)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public float matchCost() {
|
||||
return matchCost;
|
||||
}
|
||||
};
|
||||
return new ConstantScoreScorer(this, score(), iterator);
|
||||
return new ConstantScoreScorer(this, score(), disi);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -0,0 +1,401 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
import org.apache.lucene.index.PointValues.Relation;
|
||||
|
||||
/**
|
||||
* 2D polygon implementation represented as a randomized interval tree of edges.
|
||||
* <p>
|
||||
* contains() and crosses() are still O(n), but for most practical polygons
|
||||
* are much faster than brute force.
|
||||
* <p>
|
||||
* Loosely based on the algorithm described in <a href="http://www-ma2.upc.es/geoc/Schirra-pointPolygon.pdf">
|
||||
* http://www-ma2.upc.es/geoc/Schirra-pointPolygon.pdf</a>.
|
||||
*/
|
||||
// Both Polygon.contains() and Polygon.crossesSlowly() loop all edges, and first check that the edge is within a range.
|
||||
// we just organize the edges to do the same computations on the same subset of edges more efficiently.
|
||||
// TODO: clean this up, call it Polygon2D, and remove all the 2D methods from Polygon?
|
||||
final class LatLonTree {
|
||||
private final LatLonTree[] holes;
|
||||
|
||||
/** minimum latitude of this polygon's bounding box area */
|
||||
final double minLat;
|
||||
/** maximum latitude of this polygon's bounding box area */
|
||||
final double maxLat;
|
||||
/** minimum longitude of this polygon's bounding box area */
|
||||
final double minLon;
|
||||
/** maximum longitude of this polygon's bounding box area */
|
||||
final double maxLon;
|
||||
|
||||
/** root node of our tree */
|
||||
final Edge tree;
|
||||
|
||||
// TODO: "pack" all the gons and holes into one tree with separator.
|
||||
// the algorithms support this, but we have to be careful.
|
||||
LatLonTree(Polygon polygon, LatLonTree... holes) {
|
||||
this.holes = holes.clone();
|
||||
this.minLat = polygon.minLat;
|
||||
this.maxLat = polygon.maxLat;
|
||||
this.minLon = polygon.minLon;
|
||||
this.maxLon = polygon.maxLon;
|
||||
|
||||
// create interval tree of edges
|
||||
this.tree = createTree(polygon.getPolyLats(), polygon.getPolyLons());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the point is contained within this polygon.
|
||||
* <p>
|
||||
* See <a href="https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html">
|
||||
* https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html</a> for more information.
|
||||
*/
|
||||
boolean contains(double latitude, double longitude) {
|
||||
// check bounding box
|
||||
if (latitude < minLat || latitude > maxLat || longitude < minLon || longitude > maxLon) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (tree.contains(latitude, longitude)) {
|
||||
for (LatLonTree hole : holes) {
|
||||
if (hole.contains(latitude, longitude)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/** Returns relation to the provided rectangle */
|
||||
Relation relate(double minLat, double maxLat, double minLon, double maxLon) {
|
||||
// if the bounding boxes are disjoint then the shape does not cross
|
||||
if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
|
||||
return Relation.CELL_OUTSIDE_QUERY;
|
||||
}
|
||||
// if the rectangle fully encloses us, we cross.
|
||||
if (minLat <= this.minLat && maxLat >= this.maxLat && minLon <= this.minLon && maxLon >= this.maxLon) {
|
||||
return Relation.CELL_CROSSES_QUERY;
|
||||
}
|
||||
// check any holes
|
||||
for (LatLonTree hole : holes) {
|
||||
Relation holeRelation = hole.relate(minLat, maxLat, minLon, maxLon);
|
||||
if (holeRelation == Relation.CELL_CROSSES_QUERY) {
|
||||
return Relation.CELL_CROSSES_QUERY;
|
||||
} else if (holeRelation == Relation.CELL_INSIDE_QUERY) {
|
||||
return Relation.CELL_OUTSIDE_QUERY;
|
||||
}
|
||||
}
|
||||
// check each corner: if < 4 are present, its cheaper than crossesSlowly
|
||||
int numCorners = numberOfCorners(minLat, maxLat, minLon, maxLon);
|
||||
if (numCorners == 4) {
|
||||
if (tree.crosses(minLat, maxLat, minLon, maxLon)) {
|
||||
return Relation.CELL_CROSSES_QUERY;
|
||||
}
|
||||
return Relation.CELL_INSIDE_QUERY;
|
||||
} else if (numCorners > 0) {
|
||||
return Relation.CELL_CROSSES_QUERY;
|
||||
}
|
||||
|
||||
// we cross
|
||||
if (tree.crosses(minLat, maxLat, minLon, maxLon)) {
|
||||
return Relation.CELL_CROSSES_QUERY;
|
||||
}
|
||||
|
||||
return Relation.CELL_OUTSIDE_QUERY;
|
||||
}
|
||||
|
||||
// returns 0, 4, or something in between
|
||||
private int numberOfCorners(double minLat, double maxLat, double minLon, double maxLon) {
|
||||
int containsCount = 0;
|
||||
if (contains(minLat, minLon)) {
|
||||
containsCount++;
|
||||
}
|
||||
if (contains(minLat, maxLon)) {
|
||||
containsCount++;
|
||||
}
|
||||
if (containsCount == 1) {
|
||||
return containsCount;
|
||||
}
|
||||
if (contains(maxLat, maxLon)) {
|
||||
containsCount++;
|
||||
}
|
||||
if (containsCount == 2) {
|
||||
return containsCount;
|
||||
}
|
||||
if (contains(maxLat, minLon)) {
|
||||
containsCount++;
|
||||
}
|
||||
return containsCount;
|
||||
}
|
||||
|
||||
/** Helper for multipolygon logic: returns true if any of the supplied polygons contain the point */
|
||||
static boolean contains(LatLonTree[] polygons, double latitude, double longitude) {
|
||||
for (LatLonTree polygon : polygons) {
|
||||
if (polygon.contains(latitude, longitude)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/** Returns the multipolygon relation for the rectangle */
|
||||
static Relation relate(LatLonTree[] polygons, double minLat, double maxLat, double minLon, double maxLon) {
|
||||
for (LatLonTree polygon : polygons) {
|
||||
Relation relation = polygon.relate(minLat, maxLat, minLon, maxLon);
|
||||
if (relation != Relation.CELL_OUTSIDE_QUERY) {
|
||||
// note: we optimize for non-overlapping multipolygons. so if we cross one,
|
||||
// we won't keep iterating to try to find a contains.
|
||||
return relation;
|
||||
}
|
||||
}
|
||||
return Relation.CELL_OUTSIDE_QUERY;
|
||||
}
|
||||
|
||||
/** Builds a tree from multipolygon */
|
||||
static LatLonTree[] build(Polygon... polygons) {
|
||||
// TODO: use one tree with separators (carefully!)
|
||||
LatLonTree trees[] = new LatLonTree[polygons.length];
|
||||
for (int i = 0; i < trees.length; i++) {
|
||||
Polygon gon = polygons[i];
|
||||
Polygon gonHoles[] = gon.getHoles();
|
||||
LatLonTree holes[] = new LatLonTree[gonHoles.length];
|
||||
for (int j = 0; j < holes.length; j++) {
|
||||
holes[j] = new LatLonTree(gonHoles[j]);
|
||||
}
|
||||
trees[i] = new LatLonTree(gon, holes);
|
||||
}
|
||||
return trees;
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal tree node: represents polygon edge from lat1,lon1 to lat2,lon2.
|
||||
* The sort value is {@code low}, which is the minimum latitude of the edge.
|
||||
* {@code max} stores the maximum latitude of this edge or any children.
|
||||
*/
|
||||
static final class Edge {
|
||||
// lat-lon pair (in original order) of the two vertices
|
||||
final double lat1, lat2;
|
||||
final double lon1, lon2;
|
||||
/** min of this edge */
|
||||
final double low;
|
||||
/** max latitude of this edge or any children */
|
||||
double max;
|
||||
|
||||
/** left child edge, or null */
|
||||
Edge left;
|
||||
/** right child edge, or null */
|
||||
Edge right;
|
||||
|
||||
Edge(double lat1, double lon1, double lat2, double lon2, double low, double max) {
|
||||
this.lat1 = lat1;
|
||||
this.lon1 = lon1;
|
||||
this.lat2 = lat2;
|
||||
this.lon2 = lon2;
|
||||
this.low = low;
|
||||
this.max = max;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the point crosses this edge subtree an odd number of times
|
||||
* <p>
|
||||
* See <a href="https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html">
|
||||
* https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html</a> for more information.
|
||||
*/
|
||||
// ported to java from https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html
|
||||
// original code under the BSD license (https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#License%20to%20Use)
|
||||
//
|
||||
// Copyright (c) 1970-2003, Wm. Randolph Franklin
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
|
||||
// documentation files (the "Software"), to deal in the Software without restriction, including without limitation
|
||||
// the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
|
||||
// to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimers.
|
||||
// 2. Redistributions in binary form must reproduce the above copyright
|
||||
// notice in the documentation and/or other materials provided with
|
||||
// the distribution.
|
||||
// 3. The name of W. Randolph Franklin may not be used to endorse or
|
||||
// promote products derived from this Software without specific
|
||||
// prior written permission.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
// CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
// IN THE SOFTWARE.
|
||||
boolean contains(double latitude, double longitude) {
|
||||
// crossings algorithm is an odd-even algorithm, so we descend the tree xor'ing results along our path
|
||||
boolean res = false;
|
||||
if (latitude <= max) {
|
||||
if (lat1 > latitude != lat2 > latitude) {
|
||||
if (longitude < (lon1 - lon2) * (latitude - lat2) / (lat1 - lat2) + lon2) {
|
||||
res = true;
|
||||
}
|
||||
}
|
||||
if (left != null) {
|
||||
res ^= left.contains(latitude, longitude);
|
||||
}
|
||||
if (right != null && latitude >= low) {
|
||||
res ^= right.contains(latitude, longitude);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/** Returns true if the box crosses any edge in this edge subtree */
|
||||
boolean crosses(double minLat, double maxLat, double minLon, double maxLon) {
|
||||
// we just have to cross one edge to answer the question, so we descend the tree and return when we do.
|
||||
if (minLat <= max) {
|
||||
// we compute line intersections of every polygon edge with every box line.
|
||||
// if we find one, return true.
|
||||
// for each box line (AB):
|
||||
// for each poly line (CD):
|
||||
// intersects = orient(C,D,A) * orient(C,D,B) <= 0 && orient(A,B,C) * orient(A,B,D) <= 0
|
||||
double cy = lat1;
|
||||
double dy = lat2;
|
||||
double cx = lon1;
|
||||
double dx = lon2;
|
||||
|
||||
// optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
|
||||
// if not, don't waste our time trying more complicated stuff
|
||||
boolean outside = (cy < minLat && dy < minLat) ||
|
||||
(cy > maxLat && dy > maxLat) ||
|
||||
(cx < minLon && dx < minLon) ||
|
||||
(cx > maxLon && dx > maxLon);
|
||||
if (outside == false) {
|
||||
// does box's top edge intersect polyline?
|
||||
// ax = minLon, bx = maxLon, ay = maxLat, by = maxLat
|
||||
if (orient(cx, cy, dx, dy, minLon, maxLat) * orient(cx, cy, dx, dy, maxLon, maxLat) <= 0 &&
|
||||
orient(minLon, maxLat, maxLon, maxLat, cx, cy) * orient(minLon, maxLat, maxLon, maxLat, dx, dy) <= 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// does box's right edge intersect polyline?
|
||||
// ax = maxLon, bx = maxLon, ay = maxLat, by = minLat
|
||||
if (orient(cx, cy, dx, dy, maxLon, maxLat) * orient(cx, cy, dx, dy, maxLon, minLat) <= 0 &&
|
||||
orient(maxLon, maxLat, maxLon, minLat, cx, cy) * orient(maxLon, maxLat, maxLon, minLat, dx, dy) <= 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// does box's bottom edge intersect polyline?
|
||||
// ax = maxLon, bx = minLon, ay = minLat, by = minLat
|
||||
if (orient(cx, cy, dx, dy, maxLon, minLat) * orient(cx, cy, dx, dy, minLon, minLat) <= 0 &&
|
||||
orient(maxLon, minLat, minLon, minLat, cx, cy) * orient(maxLon, minLat, minLon, minLat, dx, dy) <= 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// does box's left edge intersect polyline?
|
||||
// ax = minLon, bx = minLon, ay = minLat, by = maxLat
|
||||
if (orient(cx, cy, dx, dy, minLon, minLat) * orient(cx, cy, dx, dy, minLon, maxLat) <= 0 &&
|
||||
orient(minLon, minLat, minLon, maxLat, cx, cy) * orient(minLon, minLat, minLon, maxLat, dx, dy) <= 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (left != null) {
|
||||
if (left.crosses(minLat, maxLat, minLon, maxLon)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (right != null && maxLat >= low) {
|
||||
if (right.crosses(minLat, maxLat, minLon, maxLon)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an edge interval tree from a set of polygon vertices.
|
||||
* @return root node of the tree.
|
||||
*/
|
||||
private static Edge createTree(double polyLats[], double polyLons[]) {
|
||||
// edge order is deterministic and reproducible based on the double values.
|
||||
// TODO: make a real balanced tree instead :)
|
||||
List<Integer> list = new ArrayList<Integer>(polyLats.length - 1);
|
||||
for (int i = 1; i < polyLats.length; i++) {
|
||||
list.add(i);
|
||||
}
|
||||
Collections.shuffle(list, new Random(Arrays.hashCode(polyLats) ^ Arrays.hashCode(polyLons)));
|
||||
Edge root = null;
|
||||
for (int i : list) {
|
||||
double lat1 = polyLats[i-1];
|
||||
double lon1 = polyLons[i-1];
|
||||
double lat2 = polyLats[i];
|
||||
double lon2 = polyLons[i];
|
||||
Edge newNode = new Edge(lat1, lon1, lat2, lon2, Math.min(lat1, lat2), Math.max(lat1, lat2));
|
||||
if (root == null) {
|
||||
// add first node
|
||||
root = newNode;
|
||||
} else {
|
||||
// traverse tree to find home for new node, along the path updating all parent's max value along the way.
|
||||
Edge node = root;
|
||||
while (true) {
|
||||
node.max = Math.max(node.max, newNode.max);
|
||||
if (newNode.low < node.low) {
|
||||
if (node.left == null) {
|
||||
node.left = newNode;
|
||||
break;
|
||||
}
|
||||
node = node.left;
|
||||
} else {
|
||||
if (node.right == null) {
|
||||
node.right = newNode;
|
||||
break;
|
||||
}
|
||||
node = node.right;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return root;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a positive value if points a, b, and c are arranged in counter-clockwise order,
|
||||
* negative value if clockwise, zero if collinear.
|
||||
*/
|
||||
// see the "Orient2D" method described here:
|
||||
// http://www.cs.berkeley.edu/~jrs/meshpapers/robnotes.pdf
|
||||
// https://www.cs.cmu.edu/~quake/robust.html
|
||||
// Note that this one does not yet have the floating point tricks to be exact!
|
||||
private static int orient(double ax, double ay, double bx, double by, double cx, double cy) {
|
||||
double v1 = (bx - ax) * (cy - ay);
|
||||
double v2 = (cx - ax) * (by - ay);
|
||||
if (v1 > v2) {
|
||||
return 1;
|
||||
} else if (v1 < v2) {
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import org.apache.lucene.geo.GeoTestUtil;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.index.PointValues.Relation;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
/** Test LatLonTree against the slower implementation for now */
|
||||
public class TestLatLonTree extends LuceneTestCase {
|
||||
|
||||
/** test that contains() works the same as brute force */
|
||||
public void testContainsRandom() {
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
Polygon polygon = GeoTestUtil.nextPolygon();
|
||||
LatLonTree tree = new LatLonTree(polygon);
|
||||
for (int j = 0; j < 1000; j++) {
|
||||
double point[] = GeoTestUtil.nextPointNear(polygon);
|
||||
boolean expected = polygon.contains(point[0], point[1]);
|
||||
assertEquals(expected, tree.contains(point[0], point[1]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** test that relate() works the same as brute force */
|
||||
public void testRelateRandom() {
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
Polygon polygon = GeoTestUtil.nextPolygon();
|
||||
LatLonTree tree = new LatLonTree(polygon);
|
||||
for (int j = 0; j < 1000; j++) {
|
||||
Rectangle box = GeoTestUtil.nextBoxNear(polygon);
|
||||
Relation expected = polygon.relate(box.minLat, box.maxLat, box.minLon, box.maxLon);
|
||||
assertEquals(expected, tree.relate(box.minLat, box.maxLat, box.minLon, box.maxLon));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -22,8 +22,14 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.geo.GeoUtils;
|
||||
import org.apache.lucene.util.BitUtil;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
|
||||
import static org.apache.lucene.spatial.util.MortonEncoder.encode;
|
||||
import static org.apache.lucene.geo.GeoUtils.MIN_LAT_INCL;
|
||||
import static org.apache.lucene.geo.GeoUtils.MIN_LON_INCL;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
|
@ -49,6 +55,19 @@ public final class GeoPointField extends Field {
|
|||
/** encoding step value for GeoPoint prefix terms */
|
||||
public static final int PRECISION_STEP = 9;
|
||||
|
||||
/** number of bits used for quantizing latitude and longitude values */
|
||||
public static final short BITS = 31;
|
||||
/** scaling factors to convert lat/lon into unsigned space */
|
||||
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
|
||||
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
|
||||
|
||||
/**
|
||||
* The maximum term length (used for <code>byte[]</code> buffer size)
|
||||
* for encoding <code>geoEncoded</code> values.
|
||||
* @see #geoCodedToPrefixCodedBytes(long, int, BytesRefBuilder)
|
||||
*/
|
||||
private static final int BUF_SIZE_LONG = 28/8 + 1;
|
||||
|
||||
/**
|
||||
* <b>Expert:</b> Optional flag to select term encoding for GeoPointField types
|
||||
*/
|
||||
|
@ -185,7 +204,7 @@ public final class GeoPointField extends Field {
|
|||
}
|
||||
|
||||
// set field data
|
||||
fieldsData = GeoEncodingUtils.mortonHash(latitude, longitude);
|
||||
fieldsData = encodeLatLon(latitude, longitude);
|
||||
}
|
||||
|
||||
private static FieldType getFieldType(Store stored) {
|
||||
|
@ -232,12 +251,12 @@ public final class GeoPointField extends Field {
|
|||
|
||||
/** access latitude value */
|
||||
public double getLat() {
|
||||
return GeoEncodingUtils.mortonUnhashLat((long) fieldsData);
|
||||
return decodeLatitude((long) fieldsData);
|
||||
}
|
||||
|
||||
/** access longitude value */
|
||||
public double getLon() {
|
||||
return GeoEncodingUtils.mortonUnhashLon((long) fieldsData);
|
||||
return decodeLongitude((long) fieldsData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -246,9 +265,88 @@ public final class GeoPointField extends Field {
|
|||
return null;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(GeoEncodingUtils.mortonUnhashLat((long) fieldsData));
|
||||
sb.append(decodeLatitude((long) fieldsData));
|
||||
sb.append(',');
|
||||
sb.append(GeoEncodingUtils.mortonUnhashLon((long) fieldsData));
|
||||
sb.append(decodeLongitude((long) fieldsData));
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/*************************
|
||||
* 31 bit encoding utils *
|
||||
*************************/
|
||||
public static long encodeLatLon(final double lat, final double lon) {
|
||||
long result = encode(lat, lon);
|
||||
if (result == 0xFFFFFFFFFFFFFFFFL) {
|
||||
return result & 0xC000000000000000L;
|
||||
}
|
||||
return result >>> 2;
|
||||
}
|
||||
|
||||
/** decode longitude value from morton encoded geo point */
|
||||
public static final double decodeLongitude(final long hash) {
|
||||
return unscaleLon(BitUtil.deinterleave(hash));
|
||||
}
|
||||
|
||||
/** decode latitude value from morton encoded geo point */
|
||||
public static final double decodeLatitude(final long hash) {
|
||||
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
|
||||
}
|
||||
|
||||
private static final double unscaleLon(final long val) {
|
||||
return (val / LON_SCALE) + MIN_LON_INCL;
|
||||
}
|
||||
|
||||
private static final double unscaleLat(final long val) {
|
||||
return (val / LAT_SCALE) + MIN_LAT_INCL;
|
||||
}
|
||||
|
||||
/** Convert a geocoded morton long into a prefix coded geo term */
|
||||
public static void geoCodedToPrefixCoded(long hash, int shift, BytesRefBuilder bytes) {
|
||||
geoCodedToPrefixCodedBytes(hash, shift, bytes);
|
||||
}
|
||||
|
||||
/** Convert a prefix coded geo term back into the geocoded morton long */
|
||||
public static long prefixCodedToGeoCoded(final BytesRef val) {
|
||||
final long result = 0L
|
||||
| (val.bytes[val.offset+0] & 255L) << 24
|
||||
| (val.bytes[val.offset+1] & 255L) << 16
|
||||
| (val.bytes[val.offset+2] & 255L) << 8
|
||||
| val.bytes[val.offset+3] & 255L;
|
||||
|
||||
return result << 32;
|
||||
}
|
||||
|
||||
/**
|
||||
* GeoTerms are coded using 4 prefix bytes + 1 byte to record number of prefix bits
|
||||
*
|
||||
* example prefix at shift 54 (yields 10 significant prefix bits):
|
||||
* pppppppp pp000000 00000000 00000000 00001010
|
||||
* (byte 1) (byte 2) (byte 3) (byte 4) (sigbits)
|
||||
*/
|
||||
private static void geoCodedToPrefixCodedBytes(final long hash, final int shift, final BytesRefBuilder bytes) {
|
||||
// ensure shift is 32..63
|
||||
if (shift < 32 || shift > 63) {
|
||||
throw new IllegalArgumentException("Illegal shift value, must be 32..63; got shift=" + shift);
|
||||
}
|
||||
int nChars = BUF_SIZE_LONG + 1; // one extra for the byte that contains the number of significant bits
|
||||
bytes.setLength(nChars);
|
||||
bytes.grow(nChars--);
|
||||
final int sigBits = 64 - shift;
|
||||
bytes.setByteAt(BUF_SIZE_LONG, (byte)(sigBits));
|
||||
long sortableBits = hash;
|
||||
sortableBits >>>= shift;
|
||||
sortableBits <<= 32 - sigBits;
|
||||
do {
|
||||
bytes.setByteAt(--nChars, (byte)(sortableBits));
|
||||
sortableBits >>>= 8;
|
||||
} while (nChars > 0);
|
||||
}
|
||||
|
||||
/** Get the prefix coded geo term shift value */
|
||||
public static int getPrefixCodedShift(final BytesRef val) {
|
||||
final int shift = val.bytes[val.offset + BUF_SIZE_LONG];
|
||||
if (shift > 63 || shift < 0)
|
||||
throw new NumberFormatException("Invalid shift value (" + shift + ") in prefixCoded bytes (is encoded value really a geo point?)");
|
||||
return shift;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,8 +29,8 @@ import org.apache.lucene.util.AttributeImpl;
|
|||
import org.apache.lucene.util.AttributeReflector;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.geoCodedToPrefixCoded;
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.PRECISION_STEP;
|
||||
|
||||
/**
|
||||
|
@ -163,7 +163,7 @@ final class GeoPointTokenStream extends TokenStream {
|
|||
|
||||
@Override
|
||||
public BytesRef getBytesRef() {
|
||||
GeoEncodingUtils.geoCodedToPrefixCoded(value, shift, bytes);
|
||||
geoCodedToPrefixCoded(value, shift, bytes);
|
||||
return bytes.get();
|
||||
}
|
||||
|
||||
|
|
|
@ -22,7 +22,6 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
|
||||
|
|
|
@ -23,11 +23,12 @@ import java.util.List;
|
|||
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.util.LegacyNumericUtils;
|
||||
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.BITS;
|
||||
|
||||
/**
|
||||
* Decomposes a given {@link GeoPointMultiTermQuery} into a set of terms that represent the query criteria using
|
||||
* {@link org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding#NUMERIC} method defined by
|
||||
|
@ -46,8 +47,8 @@ final class GeoPointNumericTermsEnum extends GeoPointTermsEnum {
|
|||
|
||||
GeoPointNumericTermsEnum(final TermsEnum tenum, final GeoPointMultiTermQuery query) {
|
||||
super(tenum, query);
|
||||
DETAIL_LEVEL = (short)(((GeoEncodingUtils.BITS<<1)-this.maxShift)/2);
|
||||
computeRange(0L, (short) (((GeoEncodingUtils.BITS) << 1) - 1));
|
||||
DETAIL_LEVEL = (short)(((BITS<<1)-this.maxShift)/2);
|
||||
computeRange(0L, (short) (((BITS) << 1) - 1));
|
||||
assert rangeBounds.isEmpty() == false;
|
||||
Collections.sort(rangeBounds);
|
||||
}
|
||||
|
@ -79,12 +80,12 @@ final class GeoPointNumericTermsEnum extends GeoPointTermsEnum {
|
|||
* @param res spatial res represented as a bit shift (MSB is lower res)
|
||||
*/
|
||||
private void relateAndRecurse(final long start, final long end, final short res) {
|
||||
final double minLon = GeoEncodingUtils.mortonUnhashLon(start);
|
||||
final double minLat = GeoEncodingUtils.mortonUnhashLat(start);
|
||||
final double maxLon = GeoEncodingUtils.mortonUnhashLon(end);
|
||||
final double maxLat = GeoEncodingUtils.mortonUnhashLat(end);
|
||||
final double minLon = GeoPointField.decodeLongitude(start);
|
||||
final double minLat = GeoPointField.decodeLatitude(start);
|
||||
final double maxLon = GeoPointField.decodeLongitude(end);
|
||||
final double maxLat = GeoPointField.decodeLatitude(end);
|
||||
|
||||
final short level = (short)((GeoEncodingUtils.BITS<<1)-res>>>1);
|
||||
final short level = (short)((BITS<<1)-res>>>1);
|
||||
|
||||
// if cell is within and a factor of the precision step, or it crosses the edge of the shape add the range
|
||||
final boolean within = res % GeoPointField.PRECISION_STEP == 0 && relationImpl.cellWithin(minLat, maxLat, minLon, maxLon);
|
||||
|
|
|
@ -21,14 +21,10 @@ import org.apache.lucene.index.TermsEnum;
|
|||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.geoCodedToPrefixCoded;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.prefixCodedToGeoCoded;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.getPrefixCodedShift;
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.geoCodedToPrefixCoded;
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.prefixCodedToGeoCoded;
|
||||
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.getPrefixCodedShift;
|
||||
|
||||
/**
|
||||
* Decomposes a given {@link GeoPointMultiTermQuery} into a set of terms that represent the query criteria using
|
||||
|
@ -58,7 +54,7 @@ final class GeoPointPrefixTermsEnum extends GeoPointTermsEnum {
|
|||
|
||||
public GeoPointPrefixTermsEnum(final TermsEnum tenum, final GeoPointMultiTermQuery query) {
|
||||
super(tenum, query);
|
||||
this.start = mortonHash(query.minLat, query.minLon);
|
||||
this.start = GeoPointField.encodeLatLon(query.minLat, query.minLon);
|
||||
this.currentRange = new Range(0, shift, true);
|
||||
// start shift at maxShift value (from computeMaxShift)
|
||||
this.shift = maxShift;
|
||||
|
@ -90,14 +86,14 @@ final class GeoPointPrefixTermsEnum extends GeoPointTermsEnum {
|
|||
}
|
||||
|
||||
private void nextRelation() {
|
||||
double minLon = mortonUnhashLon(currStart);
|
||||
double minLat = mortonUnhashLat(currStart);
|
||||
double minLon = GeoPointField.decodeLongitude(currStart);
|
||||
double minLat = GeoPointField.decodeLatitude(currStart);
|
||||
double maxLon;
|
||||
double maxLat;
|
||||
boolean isWithin;
|
||||
do {
|
||||
maxLon = mortonUnhashLon(currEnd);
|
||||
maxLat = mortonUnhashLat(currEnd);
|
||||
maxLon = GeoPointField.decodeLongitude(currEnd);
|
||||
maxLat = GeoPointField.decodeLatitude(currEnd);
|
||||
|
||||
isWithin = false;
|
||||
// within or a boundary
|
||||
|
@ -123,8 +119,8 @@ final class GeoPointPrefixTermsEnum extends GeoPointTermsEnum {
|
|||
currEnd = currStart | (1L<<--shift) - 1;
|
||||
} else {
|
||||
advanceVariables();
|
||||
minLon = mortonUnhashLon(currStart);
|
||||
minLat = mortonUnhashLat(currStart);
|
||||
minLon = GeoPointField.decodeLongitude(currStart);
|
||||
minLat = GeoPointField.decodeLatitude(currStart);
|
||||
}
|
||||
} while(shift < 63);
|
||||
}
|
||||
|
@ -194,7 +190,7 @@ final class GeoPointPrefixTermsEnum extends GeoPointTermsEnum {
|
|||
|
||||
final int comparison = term.compareTo(currentCell);
|
||||
if (comparison > 0) {
|
||||
seek(GeoEncodingUtils.prefixCodedToGeoCoded(term), (short)(64-GeoEncodingUtils.getPrefixCodedShift(term)));
|
||||
seek(prefixCodedToGeoCoded(term), (short)(64 - getPrefixCodedShift(term)));
|
||||
continue;
|
||||
}
|
||||
return currentCell;
|
||||
|
|
|
@ -32,15 +32,12 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.Scorer;
|
||||
import org.apache.lucene.search.TwoPhaseIterator;
|
||||
import org.apache.lucene.search.Weight;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.apache.lucene.util.DocIdSetBuilder;
|
||||
import org.apache.lucene.util.FixedBitSet;
|
||||
import org.apache.lucene.util.SparseFixedBitSet;
|
||||
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;
|
||||
|
||||
|
||||
/**
|
||||
* Custom ConstantScoreWrapper for {@code GeoPointMultiTermQuery} that cuts over to DocValues
|
||||
* for post filtering boundary ranges. Multi-valued GeoPoint documents are supported.
|
||||
|
@ -140,7 +137,7 @@ final class GeoPointTermQueryConstantScoreWrapper <Q extends GeoPointMultiTermQu
|
|||
int count = sdv.count();
|
||||
for (int i = 0; i < count; i++) {
|
||||
long hash = sdv.valueAt(i);
|
||||
if (termsEnum.postFilter(mortonUnhashLat(hash), mortonUnhashLon(hash))) {
|
||||
if (termsEnum.postFilter(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,146 +0,0 @@
|
|||
package org.apache.lucene.spatial.util;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import org.apache.lucene.util.BitUtil;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
|
||||
import static org.apache.lucene.geo.GeoUtils.MIN_LON_INCL;
|
||||
import static org.apache.lucene.geo.GeoUtils.MIN_LAT_INCL;
|
||||
|
||||
/**
|
||||
* Basic reusable geopoint encoding methods
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public final class GeoEncodingUtils {
|
||||
/** number of bits used for quantizing latitude and longitude values */
|
||||
public static final short BITS = 31;
|
||||
|
||||
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
|
||||
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
|
||||
|
||||
/**
|
||||
* The maximum term length (used for <code>byte[]</code> buffer size)
|
||||
* for encoding <code>geoEncoded</code> values.
|
||||
* @see #geoCodedToPrefixCodedBytes(long, int, BytesRefBuilder)
|
||||
*/
|
||||
private static final int BUF_SIZE_LONG = 28/8 + 1;
|
||||
|
||||
// No instance:
|
||||
private GeoEncodingUtils() {
|
||||
}
|
||||
|
||||
/**
|
||||
* encode latitude, longitude geopoint values using morton encoding method
|
||||
* https://en.wikipedia.org/wiki/Z-order_curve
|
||||
*/
|
||||
public static final Long mortonHash(final double lat, final double lon) {
|
||||
return BitUtil.interleave(scaleLon(lon), scaleLat(lat));
|
||||
}
|
||||
|
||||
/** decode longitude value from morton encoded geo point */
|
||||
public static final double mortonUnhashLon(final long hash) {
|
||||
return unscaleLon(BitUtil.deinterleave(hash));
|
||||
}
|
||||
|
||||
/** decode latitude value from morton encoded geo point */
|
||||
public static final double mortonUnhashLat(final long hash) {
|
||||
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
|
||||
}
|
||||
|
||||
private static final long scaleLon(final double val) {
|
||||
return (long) ((val-MIN_LON_INCL) * LON_SCALE);
|
||||
}
|
||||
|
||||
private static final long scaleLat(final double val) {
|
||||
return (long) ((val-MIN_LAT_INCL) * LAT_SCALE);
|
||||
}
|
||||
|
||||
private static final double unscaleLon(final long val) {
|
||||
return (val / LON_SCALE) + MIN_LON_INCL;
|
||||
}
|
||||
|
||||
private static final double unscaleLat(final long val) {
|
||||
return (val / LAT_SCALE) + MIN_LAT_INCL;
|
||||
}
|
||||
|
||||
/** Convert a geocoded morton long into a prefix coded geo term */
|
||||
public static void geoCodedToPrefixCoded(long hash, int shift, BytesRefBuilder bytes) {
|
||||
geoCodedToPrefixCodedBytes(hash, shift, bytes);
|
||||
}
|
||||
|
||||
/** Convert a prefix coded geo term back into the geocoded morton long */
|
||||
public static long prefixCodedToGeoCoded(final BytesRef val) {
|
||||
final long result = 0L
|
||||
| (val.bytes[val.offset+0] & 255L) << 24
|
||||
| (val.bytes[val.offset+1] & 255L) << 16
|
||||
| (val.bytes[val.offset+2] & 255L) << 8
|
||||
| val.bytes[val.offset+3] & 255L;
|
||||
|
||||
return result << 32;
|
||||
}
|
||||
|
||||
/**
|
||||
* GeoTerms are coded using 4 prefix bytes + 1 byte to record number of prefix bits
|
||||
*
|
||||
* example prefix at shift 54 (yields 10 significant prefix bits):
|
||||
* pppppppp pp000000 00000000 00000000 00001010
|
||||
* (byte 1) (byte 2) (byte 3) (byte 4) (sigbits)
|
||||
*/
|
||||
private static void geoCodedToPrefixCodedBytes(final long hash, final int shift, final BytesRefBuilder bytes) {
|
||||
// ensure shift is 32..63
|
||||
if (shift < 32 || shift > 63) {
|
||||
throw new IllegalArgumentException("Illegal shift value, must be 32..63; got shift=" + shift);
|
||||
}
|
||||
int nChars = BUF_SIZE_LONG + 1; // one extra for the byte that contains the number of significant bits
|
||||
bytes.setLength(nChars);
|
||||
bytes.grow(nChars--);
|
||||
final int sigBits = 64 - shift;
|
||||
bytes.setByteAt(BUF_SIZE_LONG, (byte)(sigBits));
|
||||
long sortableBits = hash;
|
||||
sortableBits >>>= shift;
|
||||
sortableBits <<= 32 - sigBits;
|
||||
do {
|
||||
bytes.setByteAt(--nChars, (byte)(sortableBits));
|
||||
sortableBits >>>= 8;
|
||||
} while (nChars > 0);
|
||||
}
|
||||
|
||||
/** Get the prefix coded geo term shift value */
|
||||
public static int getPrefixCodedShift(final BytesRef val) {
|
||||
final int shift = val.bytes[val.offset + BUF_SIZE_LONG];
|
||||
if (shift > 63 || shift < 0)
|
||||
throw new NumberFormatException("Invalid shift value (" + shift + ") in prefixCoded bytes (is encoded value really a geo point?)");
|
||||
return shift;
|
||||
}
|
||||
|
||||
/** Converts a long value into a bit string (useful for debugging) */
|
||||
public static String geoTermToString(long term) {
|
||||
StringBuilder s = new StringBuilder(64);
|
||||
final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
|
||||
for (int i = 0; i < numberOfLeadingZeros; i++) {
|
||||
s.append('0');
|
||||
}
|
||||
if (term != 0) {
|
||||
s.append(Long.toBinaryString(term));
|
||||
}
|
||||
return s.toString();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,103 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.spatial.util;
|
||||
|
||||
import org.apache.lucene.geo.GeoEncodingUtils;
|
||||
import org.apache.lucene.util.BitUtil;
|
||||
|
||||
import static org.apache.lucene.geo.GeoUtils.checkLatitude;
|
||||
import static org.apache.lucene.geo.GeoUtils.checkLongitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
|
||||
|
||||
/**
|
||||
* Quantizes lat/lon points and bit interleaves them into a binary morton code
|
||||
* in the range of 0x00000000... : 0xFFFFFFFF...
|
||||
* https://en.wikipedia.org/wiki/Z-order_curve
|
||||
*
|
||||
* This is useful for bitwise operations in raster space
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class MortonEncoder {
|
||||
|
||||
private MortonEncoder() {} // no instance
|
||||
|
||||
/**
|
||||
* Main encoding method to quantize lat/lon points and bit interleave them into a binary morton code
|
||||
* in the range of 0x00000000... : 0xFFFFFFFF...
|
||||
*
|
||||
* @param latitude latitude value: must be within standard +/-90 coordinate bounds.
|
||||
* @param longitude longitude value: must be within standard +/-180 coordinate bounds.
|
||||
* @return bit interleaved encoded values as a 64-bit {@code long}
|
||||
* @throws IllegalArgumentException if latitude or longitude is out of bounds
|
||||
*/
|
||||
public static final long encode(double latitude, double longitude) {
|
||||
checkLatitude(latitude);
|
||||
checkLongitude(longitude);
|
||||
// encode lat/lon flipping the sign bit so negative ints sort before positive ints
|
||||
final int latEnc = encodeLatitude(latitude) ^ 0x80000000;
|
||||
final int lonEnc = encodeLongitude(longitude) ^ 0x80000000;
|
||||
return BitUtil.interleave(lonEnc, latEnc);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quantizes lat/lon points and bit interleaves them into a sortable morton code
|
||||
* ranging from 0x00 : 0xFF...
|
||||
* https://en.wikipedia.org/wiki/Z-order_curve
|
||||
* This is useful for bitwise operations in raster space
|
||||
* @param latitude latitude value: must be within standard +/-90 coordinate bounds.
|
||||
* @param longitude longitude value: must be within standard +/-180 coordinate bounds.
|
||||
* @return bit interleaved encoded values as a 64-bit {@code long}
|
||||
* @throws IllegalArgumentException if latitude or longitude is out of bounds
|
||||
*/
|
||||
public static final long encodeCeil(double latitude, double longitude) {
|
||||
checkLatitude(latitude);
|
||||
checkLongitude(longitude);
|
||||
// encode lat/lon flipping the sign bit so negative ints sort before positive ints
|
||||
final int latEnc = encodeLatitudeCeil(latitude) ^ 0x80000000;
|
||||
final int lonEnc = encodeLongitudeCeil(longitude) ^ 0x80000000;
|
||||
return BitUtil.interleave(lonEnc, latEnc);
|
||||
}
|
||||
|
||||
/** decode latitude value from morton encoded geo point */
|
||||
public static final double decodeLatitude(final long hash) {
|
||||
// decode lat/lon flipping the sign bit so negative ints sort before positive ints
|
||||
return GeoEncodingUtils.decodeLatitude((int) BitUtil.deinterleave(hash >>> 1) ^ 0x80000000);
|
||||
}
|
||||
|
||||
/** decode longitude value from morton encoded geo point */
|
||||
public static final double decodeLongitude(final long hash) {
|
||||
// decode lat/lon flipping the sign bit so negative ints sort before positive ints
|
||||
return GeoEncodingUtils.decodeLongitude((int) BitUtil.deinterleave(hash) ^ 0x80000000);
|
||||
}
|
||||
|
||||
/** Converts a long value into a full 64 bit string (useful for debugging) */
|
||||
public static String geoTermToString(long term) {
|
||||
StringBuilder s = new StringBuilder(64);
|
||||
final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
|
||||
for (int i = 0; i < numberOfLeadingZeros; i++) {
|
||||
s.append('0');
|
||||
}
|
||||
if (term != 0) {
|
||||
s.append(Long.toBinaryString(term));
|
||||
}
|
||||
return s.toString();
|
||||
}
|
||||
}
|
|
@ -18,7 +18,6 @@ package org.apache.lucene.spatial.geopoint.search;
|
|||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.geo.BaseGeoPointTestCase;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
|
@ -34,12 +33,12 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
|
|||
|
||||
@Override
|
||||
protected double quantizeLat(double lat) {
|
||||
return GeoEncodingUtils.mortonUnhashLat(GeoEncodingUtils.mortonHash(lat, 0));
|
||||
return GeoPointField.decodeLatitude(GeoPointField.encodeLatLon(lat, 0));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected double quantizeLon(double lon) {
|
||||
return GeoEncodingUtils.mortonUnhashLon(GeoEncodingUtils.mortonHash(0, lon));
|
||||
return GeoPointField.decodeLongitude(GeoPointField.encodeLatLon(0, lon));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -18,7 +18,6 @@ package org.apache.lucene.spatial.geopoint.search;
|
|||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.geo.BaseGeoPointTestCase;
|
||||
import org.apache.lucene.geo.Polygon;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
|
@ -36,12 +35,12 @@ public class TestLegacyGeoPointQuery extends BaseGeoPointTestCase {
|
|||
|
||||
@Override
|
||||
protected double quantizeLat(double lat) {
|
||||
return GeoEncodingUtils.mortonUnhashLat(GeoEncodingUtils.mortonHash(lat, 0));
|
||||
return GeoPointField.decodeLatitude(GeoPointField.encodeLatLon(lat, 0));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected double quantizeLon(double lon) {
|
||||
return GeoEncodingUtils.mortonUnhashLon(GeoEncodingUtils.mortonHash(0, lon));
|
||||
return GeoPointField.decodeLongitude(GeoPointField.encodeLatLon(0, lon));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -1,102 +0,0 @@
package org.apache.lucene.spatial.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LuceneTestCase;

import static org.apache.lucene.geo.GeoTestUtil.nextLatitude;
import static org.apache.lucene.geo.GeoTestUtil.nextLongitude;

/**
 * Tests methods in {@link GeoEncodingUtils}
 */
public class TestGeoEncodingUtils extends LuceneTestCase {
  /**
   * Tests stability of {@link GeoEncodingUtils#geoCodedToPrefixCoded}
   */
  public void testGeoPrefixCoding() throws Exception {
    int numIters = atLeast(1000);
    long hash;
    long decodedHash;
    BytesRefBuilder brb = new BytesRefBuilder();
    while (numIters-- >= 0) {
      hash = GeoEncodingUtils.mortonHash(nextLatitude(), nextLongitude());
      for (int i=32; i<64; ++i) {
        GeoEncodingUtils.geoCodedToPrefixCoded(hash, i, brb);
        decodedHash = GeoEncodingUtils.prefixCodedToGeoCoded(brb.get());
        assertEquals((hash >>> i) << i, decodedHash);
      }
    }
  }

  public void testMortonEncoding() throws Exception {
    long hash = GeoEncodingUtils.mortonHash(90, 180);
    assertEquals(180.0, GeoEncodingUtils.mortonUnhashLon(hash), 0);
    assertEquals(90.0, GeoEncodingUtils.mortonUnhashLat(hash), 0);
  }

  public void testEncodeDecode() throws Exception {
    int iters = atLeast(10000);
    for(int iter=0;iter<iters;iter++) {
      double lat = nextLatitude();
      double lon = nextLongitude();

      long enc = GeoEncodingUtils.mortonHash(lat, lon);
      double latEnc = GeoEncodingUtils.mortonUnhashLat(enc);
      double lonEnc = GeoEncodingUtils.mortonUnhashLon(enc);

      // todo remove tolerance
      assertEquals("lat=" + lat + " latEnc=" + latEnc + " diff=" + (lat - latEnc), lat, latEnc, 1e-6);
      assertEquals("lon=" + lon + " lonEnc=" + lonEnc + " diff=" + (lon - lonEnc), lon, lonEnc, 1e-6);
    }
  }

  /** make sure values always go down: this is important for edge case consistency */
  public void testEncodeDecodeRoundsDown() throws Exception {
    int iters = atLeast(1000);
    for(int iter=0;iter<iters;iter++) {
      double lat = -90 + 180.0 * random().nextDouble();
      double lon = -180 + 360.0 * random().nextDouble();

      long enc = GeoEncodingUtils.mortonHash(lat, lon);
      double latEnc = GeoEncodingUtils.mortonUnhashLat(enc);
      double lonEnc = GeoEncodingUtils.mortonUnhashLon(enc);
      assertTrue(latEnc <= lat);
      assertTrue(lonEnc <= lon);
    }
  }

  public void testScaleUnscaleIsStable() throws Exception {
    int iters = atLeast(1000);
    for(int iter=0;iter<iters;iter++) {
      double lat = nextLatitude();
      double lon = nextLongitude();

      long enc = GeoEncodingUtils.mortonHash(lat, lon);
      double latEnc = GeoEncodingUtils.mortonUnhashLat(enc);
      double lonEnc = GeoEncodingUtils.mortonUnhashLon(enc);

      long enc2 = GeoEncodingUtils.mortonHash(lat, lon);
      double latEnc2 = GeoEncodingUtils.mortonUnhashLat(enc2);
      double lonEnc2 = GeoEncodingUtils.mortonUnhashLon(enc2);
      assertEquals(latEnc, latEnc2, 0.0);
      assertEquals(lonEnc, lonEnc2, 0.0);
    }
  }
}
@@ -0,0 +1,51 @@
package org.apache.lucene.spatial.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LuceneTestCase;

import static org.apache.lucene.geo.GeoTestUtil.nextLatitude;
import static org.apache.lucene.geo.GeoTestUtil.nextLongitude;
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.encodeLatLon;
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.geoCodedToPrefixCoded;
import static org.apache.lucene.spatial.geopoint.document.GeoPointField.prefixCodedToGeoCoded;

/**
 * Tests encoding methods in {@link GeoPointField}
 */
public class TestGeoPointField extends LuceneTestCase {
  /**
   * Tests stability of {@link GeoPointField#geoCodedToPrefixCoded}
   */
  public void testGeoPrefixCoding() throws Exception {
    int numIters = atLeast(1000);
    long hash;
    long decodedHash;
    BytesRefBuilder brb = new BytesRefBuilder();
    while (numIters-- >= 0) {
      hash = encodeLatLon(nextLatitude(), nextLongitude());
      for (int i=32; i<64; ++i) {
        geoCodedToPrefixCoded(hash, i, brb);
        decodedHash = prefixCodedToGeoCoded(brb.get());
        assertEquals((hash >>> i) << i, decodedHash);
      }
    }
  }
}
@@ -0,0 +1,108 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.spatial.util;

import java.util.Random;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestUtil;

import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
import static org.apache.lucene.spatial.util.MortonEncoder.decodeLatitude;
import static org.apache.lucene.spatial.util.MortonEncoder.decodeLongitude;
import static org.apache.lucene.spatial.util.MortonEncoder.encode;
import static org.apache.lucene.spatial.util.MortonEncoder.encodeCeil;

import static org.apache.lucene.util.BitUtil.deinterleave;
import static org.apache.lucene.util.BitUtil.interleave;

/**
 * Tests methods in {@link MortonEncoder}
 */
public class TestMortonEncoder extends LuceneTestCase {

  public void testMortonEncoding() throws Exception {
    final long TRANSLATE = 1L << 31;
    final double LATITUDE_DECODE = 180.0D/(0x1L<<32);
    final double LONGITUDE_DECODE = 360.0D/(0x1L<<32);
    Random random = random();
    for(int i=0; i < 10000; ++i) {
      long encoded = random().nextLong();
      long encodedLat = deinterleave(encoded >>> 1);
      long encodedLon = deinterleave(encoded);
      double expectedLat = decodeLatitude((int)(encodedLat - TRANSLATE));
      double decodedLat = decodeLatitude(encoded);
      double expectedLon = decodeLongitude((int)(encodedLon - TRANSLATE));
      double decodedLon = decodeLongitude(encoded);
      assertEquals(expectedLat, decodedLat, 0.0D);
      assertEquals(expectedLon, decodedLon, 0.0D);
      // should round-trip
      assertEquals(encoded, encode(decodedLat, decodedLon));

      // test within the range
      if (encoded != 0xFFFFFFFFFFFFFFFFL) {
        // this is the next representable value
        // all double values between [min .. max) should encode to the current integer
        // all double values between (min .. max] should encodeCeil to the next integer.
        double maxLat = expectedLat + LATITUDE_DECODE;
        encodedLat += 1;
        assertEquals(maxLat, decodeLatitude((int)(encodedLat - TRANSLATE)), 0.0D);
        double maxLon = expectedLon + LONGITUDE_DECODE;
        encodedLon += 1;
        assertEquals(maxLon, decodeLongitude((int)(encodedLon - TRANSLATE)), 0.0D);
        long encodedNext = encode(maxLat, maxLon);
        assertEquals(interleave((int)encodedLon, (int)encodedLat), encodedNext);

        // first and last doubles in range that will be quantized
        double minEdgeLat = Math.nextUp(expectedLat);
        double minEdgeLon = Math.nextUp(expectedLon);
        long encodedMinEdge = encode(minEdgeLat, minEdgeLon);
        long encodedMinEdgeCeil = encodeCeil(minEdgeLat, minEdgeLon);
        double maxEdgeLat = Math.nextDown(maxLat);
        double maxEdgeLon = Math.nextDown(maxLon);
        long encodedMaxEdge = encode(maxEdgeLat, maxEdgeLon);
        long encodedMaxEdgeCeil = encodeCeil(maxEdgeLat, maxEdgeLon);

        assertEquals(encodedLat - 1, deinterleave(encodedMinEdge >>> 1));
        assertEquals(encodedLat, deinterleave(encodedMinEdgeCeil >>> 1));
        assertEquals(encodedLon - 1, deinterleave(encodedMinEdge));
        assertEquals(encodedLon, deinterleave(encodedMinEdgeCeil));

        assertEquals(encodedLat - 1, deinterleave(encodedMaxEdge >>> 1));
        assertEquals(encodedLat, deinterleave(encodedMaxEdgeCeil >>> 1));
        assertEquals(encodedLon - 1, deinterleave(encodedMaxEdge));
        assertEquals(encodedLon, deinterleave(encodedMaxEdgeCeil));

        // check random values within the double range
        long minBitsLat = NumericUtils.doubleToSortableLong(minEdgeLat);
        long maxBitsLat = NumericUtils.doubleToSortableLong(maxEdgeLat);
        long minBitsLon = NumericUtils.doubleToSortableLong(minEdgeLon);
        long maxBitsLon = NumericUtils.doubleToSortableLong(maxEdgeLon);
        for (int j = 0; j < 100; j++) {
          double valueLat = NumericUtils.sortableLongToDouble(TestUtil.nextLong(random, minBitsLat, maxBitsLat));
          double valueLon = NumericUtils.sortableLongToDouble(TestUtil.nextLong(random, minBitsLon, maxBitsLon));
          // round down
          assertEquals(encoded, encode(valueLat, valueLon));
          // round up
          assertEquals(interleave((int)encodedLon, (int)encodedLat), encodeCeil(valueLat, valueLon));
        }
      }
    }
  }
}
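The test above relies on the morton layout produced by BitUtil: longitude occupies the even bits and latitude the odd bits of the interleaved code. A minimal sketch of that invariant, assuming only the interleave/deinterleave methods already used here (the hex values are arbitrary):

    int encodedLon = 0x12345678;                                    // goes into the even bits
    int encodedLat = 0x0FEDCBA9;                                    // goes into the odd bits
    long morton = BitUtil.interleave(encodedLon, encodedLat);
    assert (int) BitUtil.deinterleave(morton) == encodedLon;        // even bits back out
    assert (int) BitUtil.deinterleave(morton >>> 1) == encodedLat;  // odd bits back out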
@@ -203,6 +203,8 @@ Other Changes

* SOLR-8929: Add an idea module for solr/server to enable launching start.jar (Scott Blum, Steve Rowe)

* SOLR-8933: Solr should not close container streams. (Mike Drob, Uwe Schindler, Mark Miller)

================== 6.0.0 ==================

Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release
@@ -17,9 +17,9 @@
package org.apache.solr.servlet;

import javax.servlet.ServletInputStream;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

@@ -41,7 +41,10 @@ import java.util.Random;
import java.util.Set;

import com.google.common.collect.ImmutableSet;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.input.CloseShieldInputStream;
import org.apache.commons.io.output.CloseShieldOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.http.Header;
import org.apache.http.HeaderIterator;

@@ -534,7 +537,8 @@ public class HttpSolrCall {
    } else if (isPostOrPutRequest) {
      HttpEntityEnclosingRequestBase entityRequest =
          "POST".equals(req.getMethod()) ? new HttpPost(urlstr) : new HttpPut(urlstr);
      HttpEntity entity = new InputStreamEntity(req.getInputStream(), req.getContentLength());
      InputStream in = new CloseShieldInputStream(req.getInputStream()); // Prevent close of container streams
      HttpEntity entity = new InputStreamEntity(in, req.getContentLength());
      entityRequest.setEntity(entity);
      method = entityRequest;
    } else if ("DELETE".equals(req.getMethod())) {

@@ -723,7 +727,8 @@ public class HttpSolrCall {
      }

      if (Method.HEAD != reqMethod) {
        QueryResponseWriterUtil.writeQueryResponse(response.getOutputStream(), responseWriter, solrReq, solrRsp, ct);
        OutputStream out = new CloseShieldOutputStream(response.getOutputStream()); // Prevent close of container streams, see SOLR-8933
        QueryResponseWriterUtil.writeQueryResponse(out, responseWriter, solrReq, solrRsp, ct);
      }
      //else http HEAD request, nothing to write out, waited this long just to get ContentType
    } catch (EOFException e) {
@@ -17,6 +17,7 @@
package org.apache.solr.servlet;

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.CloseShieldOutputStream;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.params.CommonParams;

@@ -25,6 +26,7 @@ import org.apache.solr.core.SolrCore;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;

@@ -49,11 +51,14 @@ public final class LoadAdminUiServlet extends BaseSolrServlet {
    String admin = request.getRequestURI().substring(request.getContextPath().length());
    CoreContainer cores = (CoreContainer) request.getAttribute("org.apache.solr.CoreContainer");
    InputStream in = getServletContext().getResourceAsStream(admin);
    Writer out = null;
    if(in != null && cores != null) {
      try {
        response.setCharacterEncoding("UTF-8");
        response.setContentType("text/html");
        Writer out = new OutputStreamWriter(response.getOutputStream(), StandardCharsets.UTF_8);

        // Protect container owned streams from being closed by us, see SOLR-8933
        out = new OutputStreamWriter(new CloseShieldOutputStream(response.getOutputStream()), StandardCharsets.UTF_8);

        String html = IOUtils.toString(in, "UTF-8");
        Package pack = SolrCore.class.getPackage();

@@ -70,9 +75,9 @@ public final class LoadAdminUiServlet extends BaseSolrServlet {
        };

        out.write( StringUtils.replaceEach(html, search, replace) );
        out.flush();
      } finally {
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(out);
      }
    } else {
      response.sendError(404);
@@ -0,0 +1,105 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.servlet;

import java.io.IOException;

import javax.servlet.ReadListener;
import javax.servlet.ServletInputStream;

import org.apache.solr.common.util.SuppressForbidden;

/**
 * Provides a convenient extension of the {@link ServletInputStream} class that can be subclassed by developers wishing
 * to adapt the behavior of a Stream. One such example may be to override {@link #close()} to instead be a no-op as in
 * SOLR-8933.
 *
 * This class implements the Wrapper or Decorator pattern. Methods default to calling through to the wrapped stream.
 */
@SuppressForbidden(reason = "delegate methods")
public class ServletInputStreamWrapper extends ServletInputStream {
  final ServletInputStream stream;

  public ServletInputStreamWrapper(ServletInputStream stream) throws IOException {
    this.stream = stream;
  }

  public int hashCode() {
    return stream.hashCode();
  }

  public boolean equals(Object obj) {
    return stream.equals(obj);
  }

  public int available() throws IOException {
    return stream.available();
  }

  public void close() throws IOException {
    stream.close();
  }

  public boolean isFinished() {
    return stream.isFinished();
  }

  public boolean isReady() {
    return stream.isReady();
  }

  public int read() throws IOException {
    return stream.read();
  }

  public int read(byte[] b) throws IOException {
    return stream.read(b);
  }

  public int read(byte[] b, int off, int len) throws IOException {
    return stream.read(b, off, len);
  }

  public void mark(int readlimit) {
    stream.mark(readlimit);
  }

  public boolean markSupported() {
    return stream.markSupported();
  }

  public int readLine(byte[] b, int off, int len) throws IOException {
    return stream.readLine(b, off, len);
  }

  public void reset() throws IOException {
    stream.reset();
  }

  public void setReadListener(ReadListener arg0) {
    stream.setReadListener(arg0);
  }

  public long skip(long n) throws IOException {
    return stream.skip(n);
  }

  public String toString() {
    return stream.toString();
  }

}
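The javadoc spells out the intended use: subclass the wrapper and override close() so a container-owned stream survives an over-eager close. A minimal sketch, assuming a request variable from the surrounding servlet code (the SolrDispatchFilter change below uses the same pattern, but with an assertion rather than a no-op):

    ServletInputStream shielded = new ServletInputStreamWrapper(request.getInputStream()) {
      @Override
      public void close() {
        // no-op: the container owns this stream and closes it itself (SOLR-8933)
      }
    };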
@@ -0,0 +1,140 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.servlet;

import java.io.IOException;

import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;

import org.apache.solr.common.util.SuppressForbidden;

/**
 * Provides a convenient extension of the {@link ServletOutputStream} class that can be subclassed by developers wishing
 * to adapt the behavior of a Stream. One such example may be to override {@link #close()} to instead be a no-op as in
 * SOLR-8933.
 *
 * This class implements the Wrapper or Decorator pattern. Methods default to calling through to the wrapped stream.
 */
@SuppressForbidden(reason = "delegate methods")
public class ServletOutputStreamWrapper extends ServletOutputStream {
  final ServletOutputStream stream;

  public ServletOutputStreamWrapper(ServletOutputStream stream) {
    this.stream = stream;
  }

  public int hashCode() {
    return stream.hashCode();
  }

  public boolean equals(Object obj) {
    return stream.equals(obj);
  }

  public void flush() throws IOException {
    stream.flush();
  }

  public void close() throws IOException {
    stream.close();
  }

  public boolean isReady() {
    return stream.isReady();
  }

  public void print(boolean arg0) throws IOException {
    stream.print(arg0);
  }

  public void print(char c) throws IOException {
    stream.print(c);
  }

  public void print(double d) throws IOException {
    stream.print(d);
  }

  public void print(float f) throws IOException {
    stream.print(f);
  }

  public void print(int i) throws IOException {
    stream.print(i);
  }

  public void print(long l) throws IOException {
    stream.print(l);
  }

  public void print(String arg0) throws IOException {
    stream.print(arg0);
  }

  public void println() throws IOException {
    stream.println();
  }

  public void println(boolean b) throws IOException {
    stream.println(b);
  }

  public void println(char c) throws IOException {
    stream.println(c);
  }

  public void println(double d) throws IOException {
    stream.println(d);
  }

  public void println(float f) throws IOException {
    stream.println(f);
  }

  public void println(int i) throws IOException {
    stream.println(i);
  }

  public void println(long l) throws IOException {
    stream.println(l);
  }

  public void println(String s) throws IOException {
    stream.println(s);
  }

  public void setWriteListener(WriteListener arg0) {
    stream.setWriteListener(arg0);
  }

  public void write(int b) throws IOException {
    stream.write(b);
  }

  public void write(byte[] b) throws IOException {
    stream.write(b);
  }

  public void write(byte[] b, int off, int len) throws IOException {
    stream.write(b, off, len);
  }

  public String toString() {
    return stream.toString();
  }
}
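In the production code paths touched earlier in this commit (HttpSolrCall, LoadAdminUiServlet, SolrRequestParsers), the same effect is obtained with commons-io close shields rather than a subclass of this wrapper; roughly, assuming a response variable from the surrounding servlet code:

    // Writes pass through, but close() is swallowed so the container can still finish the response.
    OutputStream out = new CloseShieldOutputStream(response.getOutputStream());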
@@ -20,12 +20,18 @@ import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
import java.nio.file.Paths;

@@ -36,6 +42,8 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.io.input.CloseShieldInputStream;
import org.apache.commons.io.output.CloseShieldOutputStream;
import org.apache.commons.lang.StringUtils;
import org.apache.http.client.HttpClient;
import org.apache.solr.common.SolrException;

@@ -67,6 +75,9 @@ public class SolrDispatchFilter extends BaseSolrFilter {
  protected HttpClient httpClient;
  private ArrayList<Pattern> excludePatterns;

  // Effectively immutable
  private Boolean testMode = null;

  /**
   * Enum to define action that needs to be processed.
   * PASSTHROUGH: Pass through to Restlet via webapp.

@@ -80,6 +91,19 @@ public class SolrDispatchFilter extends BaseSolrFilter {
  }

  public SolrDispatchFilter() {
    // turn on test mode when running tests
    assert testMode = true;

    if (testMode == null) {
      testMode = false;
    } else {
      String tm = System.getProperty("solr.tests.doContainerStreamCloseAssert");
      if (tm != null) {
        testMode = Boolean.parseBoolean(tm);
      } else {
        testMode = true;
      }
    }
  }

  public static final String PROPERTIES_ATTRIBUTE = "solr.properties";

@@ -202,6 +226,10 @@ public class SolrDispatchFilter extends BaseSolrFilter {
    if (wrappedRequest.get() != null) {
      request = wrappedRequest.get();
    }

    request = closeShield(request, retry);
    response = closeShield(response, retry);

    if (cores.getAuthenticationPlugin() != null) {
      log.debug("User principal: {}", ((HttpServletRequest) request).getUserPrincipal());
    }

@@ -298,4 +326,68 @@ public class SolrDispatchFilter extends BaseSolrFilter {
    }
    return true;
  }

  /**
   * Wrap the request's input stream with a close shield, as if by a {@link CloseShieldInputStream}. If this is a
   * retry, we will assume that the stream has already been wrapped and do nothing.
   *
   * @param request The request to wrap.
   * @param retry If this is an original request or a retry.
   * @return A request object with an {@link InputStream} that will ignore calls to close.
   */
  private ServletRequest closeShield(ServletRequest request, boolean retry) {
    if (testMode && !retry) {
      return new HttpServletRequestWrapper((HttpServletRequest) request) {
        ServletInputStream stream;

        @Override
        public ServletInputStream getInputStream() throws IOException {
          // Lazy stream creation
          if (stream == null) {
            stream = new ServletInputStreamWrapper(super.getInputStream()) {
              @Override
              public void close() {
                assert false : "Attempted close of request input stream.";
              }
            };
          }
          return stream;
        }
      };
    } else {
      return request;
    }
  }

  /**
   * Wrap the response's output stream with a close shield, as if by a {@link CloseShieldOutputStream}. If this is a
   * retry, we will assume that the stream has already been wrapped and do nothing.
   *
   * @param response The response to wrap.
   * @param retry If this response corresponds to an original request or a retry.
   * @return A response object with an {@link OutputStream} that will ignore calls to close.
   */
  private ServletResponse closeShield(ServletResponse response, boolean retry) {
    if (testMode && !retry) {
      return new HttpServletResponseWrapper((HttpServletResponse) response) {
        ServletOutputStream stream;

        @Override
        public ServletOutputStream getOutputStream() throws IOException {
          // Lazy stream creation
          if (stream == null) {
            stream = new ServletOutputStreamWrapper(super.getOutputStream()) {
              @Override
              public void close() {
                assert false : "Attempted close of response output stream.";
              }
            };
          }
          return stream;
        }
      };
    } else {
      return response;
    }
  }
}
@@ -17,6 +17,7 @@
package org.apache.solr.servlet;

import javax.servlet.http.HttpServletRequest;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;

@@ -41,6 +42,7 @@ import java.util.Map;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.input.CloseShieldInputStream;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;

@@ -484,7 +486,8 @@ public class SolrRequestParsers

      @Override
      public InputStream getStream() throws IOException {
        return req.getInputStream();
        // Protect container owned streams from being closed by us, see SOLR-8933
        return new CloseShieldInputStream(req.getInputStream());
      }
    }

@@ -618,7 +621,8 @@ public class SolrRequestParsers
    final Charset charset = (cs == null) ? StandardCharsets.UTF_8 : Charset.forName(cs);

    try {
      in = FastInputStream.wrap( in == null ? req.getInputStream() : in);
      // Protect container owned streams from being closed by us, see SOLR-8933
      in = FastInputStream.wrap( in == null ? new CloseShieldInputStream(req.getInputStream()) : in );

      final long bytesRead = parseFormDataContent(in, maxLength, charset, map, false);
      if (bytesRead == 0L && totalLength > 0L) {

@@ -737,7 +741,9 @@ public class SolrRequestParsers
    if (formdata.isFormData(req)) {
      String userAgent = req.getHeader("User-Agent");
      boolean isCurl = userAgent != null && userAgent.startsWith("curl/");
      FastInputStream input = FastInputStream.wrap( req.getInputStream() );

      // Protect container owned streams from being closed by us, see SOLR-8933
      FastInputStream input = FastInputStream.wrap( new CloseShieldInputStream(req.getInputStream()) );

      if (isCurl) {
        SolrParams params = autodetect(req, streams, input);
@@ -64,6 +64,7 @@ public class JettyWebappTest extends SolrTestCaseJ4
    super.setUp();
    System.setProperty("solr.solr.home", SolrJettyTestBase.legacyExampleCollection1SolrHome());
    System.setProperty("tests.shardhandler.randomSeed", Long.toString(random().nextLong()));
    System.setProperty("solr.tests.doContainerStreamCloseAssert", "false");

    File dataDir = createTempDir().toFile();
    dataDir.mkdirs();

@@ -95,6 +96,7 @@ public class JettyWebappTest extends SolrTestCaseJ4
    } catch( Exception ex ) {}
    System.clearProperty("tests.shardhandler.randomSeed");
    System.clearProperty("solr.data.dir");
    System.clearProperty("solr.tests.doContainerStreamCloseAssert");
    super.tearDown();
  }