commit 564e1eca42
merge from master
@@ -293,6 +293,7 @@
             <include>org/elasticsearch/common/util/MockBigArrays$*.class</include>
             <include>org/elasticsearch/node/NodeMocksPlugin.class</include>
             <include>org/elasticsearch/node/MockNode.class</include>
+            <include>org/elasticsearch/common/io/PathUtilsForTesting.class</include>
           </includes>
           <excludes>
             <!-- unit tests for yaml suite parser & rest spec parser need to be excluded -->
@@ -259,6 +259,10 @@ public class XAnalyzingSuggester extends Lookup {
   public long ramBytesUsed() {
     return fst == null ? 0 : fst.ramBytesUsed();
   }
+
+  public int getMaxAnalyzedPathsForOneInput() {
+    return maxAnalyzedPathsForOneInput;
+  }
 
   // Replaces SEP with epsilon or remaps them if
   // we were asked to preserve them:
@@ -58,28 +58,28 @@ public class CustomFieldQuery extends FieldQuery {
     }
 
     @Override
-    void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries) throws IOException {
+    void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries, float boost) throws IOException {
         if (sourceQuery instanceof SpanTermQuery) {
-            super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries);
+            super.flatten(new TermQuery(((SpanTermQuery) sourceQuery).getTerm()), reader, flatQueries, boost);
         } else if (sourceQuery instanceof ConstantScoreQuery) {
-            flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries);
+            flatten(((ConstantScoreQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
         } else if (sourceQuery instanceof FunctionScoreQuery) {
-            flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
+            flatten(((FunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
         } else if (sourceQuery instanceof FilteredQuery) {
-            flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries);
+            flatten(((FilteredQuery) sourceQuery).getQuery(), reader, flatQueries, boost);
             flatten(((FilteredQuery) sourceQuery).getFilter(), reader, flatQueries);
         } else if (sourceQuery instanceof MultiPhrasePrefixQuery) {
-            flatten(sourceQuery.rewrite(reader), reader, flatQueries);
+            flatten(sourceQuery.rewrite(reader), reader, flatQueries, boost);
         } else if (sourceQuery instanceof FiltersFunctionScoreQuery) {
-            flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries);
+            flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
         } else if (sourceQuery instanceof MultiPhraseQuery) {
             MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery);
             convertMultiPhraseQuery(0, new int[q.getTermArrays().size()], q, q.getTermArrays(), q.getPositions(), reader, flatQueries);
         } else if (sourceQuery instanceof BlendedTermQuery) {
             final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery;
-            flatten(blendedTermQuery.rewrite(reader), reader, flatQueries);
+            flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost);
         } else {
-            super.flatten(sourceQuery, reader, flatQueries);
+            super.flatten(sourceQuery, reader, flatQueries, boost);
         }
     }
 
@@ -93,7 +93,7 @@ public class CustomFieldQuery extends FieldQuery {
         if (numTerms > 16) {
             for (Term[] currentPosTerm : terms) {
                 for (Term term : currentPosTerm) {
-                    super.flatten(new TermQuery(term), reader, flatQueries);
+                    super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost());
                 }
             }
             return;
@@ -111,7 +111,7 @@ public class CustomFieldQuery extends FieldQuery {
             }
             PhraseQuery query = queryBuilder.build();
             query.setBoost(orig.getBoost());
-            this.flatten(query, reader, flatQueries);
+            this.flatten(query, reader, flatQueries, orig.getBoost());
         } else {
             Term[] t = terms.get(currentPos);
             for (int i = 0; i < t.length; i++) {
@@ -127,7 +127,7 @@ public class CustomFieldQuery extends FieldQuery {
             return;
         }
         if (sourceFilter instanceof QueryWrapperFilter) {
-            flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries);
+            flatten(((QueryWrapperFilter) sourceFilter).getQuery(), reader, flatQueries, 1.0F);
         }
     }
 }
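Aside: the hunks above thread an explicit boost down the flatten recursion instead of mutating queries via setBoost. A minimal standalone sketch of that pattern, using hypothetical stand-in types (not the Lucene classes):

import java.util.ArrayList;
import java.util.Collection;

// Hypothetical stand-ins, not the Lucene query types.
interface Q {}
record Leaf(String term) implements Q {}
record Boosted(Q inner, float boost) implements Q {}

public class FlattenSketch {
    // Each recursive call forwards the accumulated boost, mirroring the
    // new flatten(..., float boost) signature in the diff above.
    static void flatten(Q source, Collection<String> flat, float boost) {
        if (source instanceof Boosted b) {
            flatten(b.inner(), flat, boost * b.boost()); // unwrap and combine boosts
        } else if (source instanceof Leaf l) {
            flat.add(l.term() + "^" + boost);            // leaf carries the final boost
        }
    }

    public static void main(String[] args) {
        Collection<String> flat = new ArrayList<>();
        flatten(new Boosted(new Boosted(new Leaf("title:foo"), 2.0f), 1.5f), flat, 1.0f);
        System.out.println(flat); // [title:foo^3.0]
    }
}

Passing the boost as a parameter keeps queries immutable during flattening; the top-level filter entry point supplies a neutral 1.0F.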
core/src/main/java/org/apache/lucene/util/XGeoHashUtils.java (new file, 279 lines)
@@ -0,0 +1,279 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.lucene.util;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Utilities for converting to/from the GeoHash standard
 *
 * The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
 * representing the level (1-12) [xyxy...xyxyllll]
 *
 * This differs from a morton encoded value which interleaves lat/lon (y/x).
 *
 * @lucene.experimental
 */
public class XGeoHashUtils {
  public static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
      '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
      'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

  public static final String BASE_32_STRING = new String(BASE_32);

  public static final int PRECISION = 12;
  private static final short MORTON_OFFSET = (XGeoUtils.BITS<<1) - (PRECISION*5);

  /**
   * Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
   */
  public static final long longEncode(final double lon, final double lat, final int level) {
    // shift to appropriate level
    final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
    return ((BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
  }

  /**
   * Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
   */
  public static final long longEncode(final String hash) {
    int level = hash.length()-1;
    long b;
    long l = 0L;
    for(char c : hash.toCharArray()) {
      b = (long)(BASE_32_STRING.indexOf(c));
      l |= (b<<(level--*5));
    }
    return (l<<4)|hash.length();
  }

  /**
   * Encode an existing geohash long to the provided precision
   */
  public static long longEncode(long geohash, int level) {
    final short precision = (short)(geohash & 15);
    if (precision == level) {
      return geohash;
    } else if (precision > level) {
      return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
    }
    return ((geohash >>> 4) << (((level - precision) * 5) + 4) | level);
  }

  /**
   * Encode to a geohash string from the geohash based long format
   */
  public static final String stringEncode(long geoHashLong) {
    int level = (int)geoHashLong&15;
    geoHashLong >>>= 4;
    char[] chars = new char[level];
    do {
      chars[--level] = BASE_32[(int)(geoHashLong&31L)];
      geoHashLong>>>=5;
    } while(level > 0);

    return new String(chars);
  }

  /**
   * Encode to a geohash string from full resolution longitude, latitude
   */
  public static final String stringEncode(final double lon, final double lat) {
    return stringEncode(lon, lat, 12);
  }

  /**
   * Encode to a level specific geohash string from full resolution longitude, latitude
   */
  public static final String stringEncode(final double lon, final double lat, final int level) {
    // bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
    final long hashedVal = BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat));

    StringBuilder geoHash = new StringBuilder();
    short precision = 0;
    final short msf = (XGeoUtils.BITS<<1)-5;
    long mask = 31L<<msf;
    do {
      geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
      // next 5 bits
      mask >>>= 5;
    } while (++precision < level);
    return geoHash.toString();
  }

  /**
   * Encode to a full precision geohash string from a given morton encoded long value
   */
  public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
    return stringEncode(hashedVal, PRECISION);
  }

  /**
   * Encode to a geohash string at a given level from a morton long
   */
  public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
    // bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
    hashedVal = BitUtil.flipFlop(hashedVal);

    StringBuilder geoHash = new StringBuilder();
    short precision = 0;
    final short msf = (XGeoUtils.BITS<<1)-5;
    long mask = 31L<<msf;
    do {
      geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
      // next 5 bits
      mask >>>= 5;
    } while (++precision < level);
    return geoHash.toString();
  }

  /**
   * Encode to a morton long value from a given geohash string
   */
  public static final long mortonEncode(final String hash) {
    int level = 11;
    long b;
    long l = 0L;
    for(char c : hash.toCharArray()) {
      b = (long)(BASE_32_STRING.indexOf(c));
      l |= (b<<((level--*5) + MORTON_OFFSET));
    }
    return BitUtil.flipFlop(l);
  }

  /**
   * Encode to a morton long value from a given geohash long value
   */
  public static final long mortonEncode(final long geoHashLong) {
    final int level = (int)(geoHashLong&15);
    final short odd = (short)(level & 1);

    return BitUtil.flipFlop((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd));
  }

  private static final char encode(int x, int y) {
    return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
  }

  /**
   * Calculate all neighbors of a given geohash cell.
   *
   * @param geohash Geohash of the defined cell
   * @return geohashes of all neighbor cells
   */
  public static Collection<? extends CharSequence> neighbors(String geohash) {
    return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
  }

  /**
   * Calculate the geohash of a neighbor of a geohash
   *
   * @param geohash the geohash of a cell
   * @param level level of the geohash
   * @param dx delta of the first grid coordinate (must be -1, 0 or +1)
   * @param dy delta of the second grid coordinate (must be -1, 0 or +1)
   * @return geohash of the defined cell
   */
  private final static String neighbor(String geohash, int level, int dx, int dy) {
    int cell = BASE_32_STRING.indexOf(geohash.charAt(level - 1));

    // Decoding the Geohash bit pattern to determine grid coordinates
    int x0 = cell & 1;  // first bit of x
    int y0 = cell & 2;  // first bit of y
    int x1 = cell & 4;  // second bit of x
    int y1 = cell & 8;  // second bit of y
    int x2 = cell & 16; // third bit of x

    // combine the bit pattern to grid coordinates.
    // note that the semantics of x and y swap
    // on each level
    int x = x0 + (x1 / 2) + (x2 / 4);
    int y = (y0 / 2) + (y1 / 4);

    if (level == 1) {
      // Root cells at north (namely "bcfguvyz") or at
      // south (namely "0145hjnp") do not have neighbors
      // in north/south direction
      if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
        return null;
      } else {
        return Character.toString(encode(x + dx, y + dy));
      }
    } else {
      // define grid coordinates for next level
      final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
      final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);

      // if the defined neighbor has the same parent as the current cell
      // encode the cell directly. Otherwise find the cell next to this
      // cell recursively. Since encoding wraps around within a cell
      // it can be encoded here.
      // xLimit and yLimit must always be respectively 7 and 3
      // since x and y semantics swap on each level.
      if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
        return geohash.substring(0, level - 1) + encode(nx, ny);
      } else {
        String neighbor = neighbor(geohash, level - 1, dx, dy);
        return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
      }
    }
  }

  /**
   * Add all geohashes of the cells next to a given geohash to a list.
   *
   * @param geohash Geohash of a specified cell
   * @param neighbors list to add the neighbors to
   * @return the given list
   */
  public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
    return addNeighbors(geohash, geohash.length(), neighbors);
  }

  /**
   * Add all geohashes of the cells next to a given geohash to a list.
   *
   * @param geohash Geohash of a specified cell
   * @param length level of the given geohash
   * @param neighbors list to add the neighbors to
   * @return the given list
   */
  public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
    String south = neighbor(geohash, length, 0, -1);
    String north = neighbor(geohash, length, 0, +1);
    if (north != null) {
      neighbors.add(neighbor(north, length, -1, 0));
      neighbors.add(north);
      neighbors.add(neighbor(north, length, +1, 0));
    }

    neighbors.add(neighbor(geohash, length, -1, 0));
    neighbors.add(neighbor(geohash, length, +1, 0));

    if (south != null) {
      neighbors.add(neighbor(south, length, -1, 0));
      neighbors.add(south);
      neighbors.add(neighbor(south, length, +1, 0));
    }

    return neighbors;
  }
}
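Aside: a quick usage sketch of the class above, assuming XGeoHashUtils and its XGeoUtils/BitUtil dependencies are on the classpath; the printed cell value is illustrative:

import org.apache.lucene.util.XGeoHashUtils;

public class GeoHashDemo {
    public static void main(String[] args) {
        // Encode lon/lat into the geohash long format, then into the string form.
        long hash = XGeoHashUtils.longEncode(-122.41, 37.77, 6); // 6-char precision
        String cell = XGeoHashUtils.stringEncode(hash);
        System.out.println(cell);       // a 6-character cell; expected to start with "9q8" for San Francisco

        // The 4 least significant bits carry the level, per the class javadoc.
        System.out.println(hash & 15);  // 6

        // The eight surrounding cells at the same precision.
        System.out.println(XGeoHashUtils.neighbors(cell));
    }
}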
core/src/main/java/org/apache/lucene/util/XGeoProjectionUtils.java (new file, 383 lines)
@@ -0,0 +1,383 @@
package org.apache.lucene.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Reusable geo-spatial projection utility methods.
 *
 * @lucene.experimental
 */
public class XGeoProjectionUtils {
  // WGS84 earth-ellipsoid major (a) minor (b) radius, (f) flattening and eccentricity (e)
  static final double SEMIMAJOR_AXIS = 6_378_137; // [m]
  static final double FLATTENING = 1.0/298.257223563;
  static final double SEMIMINOR_AXIS = SEMIMAJOR_AXIS * (1.0 - FLATTENING); //6_356_752.31420; // [m]
  static final double ECCENTRICITY = StrictMath.sqrt((2.0 - FLATTENING) * FLATTENING);
  static final double PI_OVER_2 = StrictMath.PI / 2.0D;
  static final double SEMIMAJOR_AXIS2 = SEMIMAJOR_AXIS * SEMIMAJOR_AXIS;
  static final double SEMIMINOR_AXIS2 = SEMIMINOR_AXIS * SEMIMINOR_AXIS;

  /**
   * Converts from geocentric earth-centered earth-fixed to geodesic lat/lon/alt
   * @param x Cartesian x coordinate
   * @param y Cartesian y coordinate
   * @param z Cartesian z coordinate
   * @param lla 0: longitude 1: latitude 2: altitude
   * @return double array as 0: longitude 1: latitude 2: altitude
   */
  public static final double[] ecfToLLA(final double x, final double y, final double z, double[] lla) {
    boolean atPole = false;
    final double ad_c = 1.0026000D;
    final double e2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
    final double ep2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMINOR_AXIS2);
    final double cos67P5 = 0.38268343236508977D;

    if (lla == null) {
      lla = new double[3];
    }

    if (x != 0.0) {
      lla[0] = StrictMath.atan2(y,x);
    } else {
      if (y > 0) {
        lla[0] = PI_OVER_2;
      } else if (y < 0) {
        lla[0] = -PI_OVER_2;
      } else {
        atPole = true;
        lla[0] = 0.0D;
        if (z > 0.0) {
          lla[1] = PI_OVER_2;
        } else if (z < 0.0) {
          lla[1] = -PI_OVER_2;
        } else {
          lla[1] = PI_OVER_2;
          lla[2] = -SEMIMINOR_AXIS;
          return lla;
        }
      }
    }

    final double w2 = x*x + y*y;
    final double w = StrictMath.sqrt(w2);
    final double t0 = z * ad_c;
    final double s0 = StrictMath.sqrt(t0 * t0 + w2);
    final double sinB0 = t0 / s0;
    final double cosB0 = w / s0;
    final double sin3B0 = sinB0 * sinB0 * sinB0;
    final double t1 = z + SEMIMINOR_AXIS * ep2 * sin3B0;
    final double sum = w - SEMIMAJOR_AXIS * e2 * cosB0 * cosB0 * cosB0;
    final double s1 = StrictMath.sqrt(t1 * t1 + sum * sum);
    final double sinP1 = t1 / s1;
    final double cosP1 = sum / s1;
    final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - e2 * sinP1 * sinP1);

    if (cosP1 >= cos67P5) {
      lla[2] = w / cosP1 - rn;
    } else if (cosP1 <= -cos67P5) {
      lla[2] = w / -cosP1 - rn;
    } else {
      lla[2] = z / sinP1 + rn * (e2 - 1.0);
    }
    if (!atPole) {
      lla[1] = StrictMath.atan(sinP1/cosP1);
    }
    lla[0] = StrictMath.toDegrees(lla[0]);
    lla[1] = StrictMath.toDegrees(lla[1]);

    return lla;
  }

  /**
   * Converts from geodesic lon lat alt to geocentric earth-centered earth-fixed
   * @param lon geodesic longitude
   * @param lat geodesic latitude
   * @param alt geodesic altitude
   * @param ecf reusable earth-centered earth-fixed result
   * @return either a new ecef array or the reusable ecf parameter
   */
  public static final double[] llaToECF(double lon, double lat, double alt, double[] ecf) {
    lon = StrictMath.toRadians(lon);
    lat = StrictMath.toRadians(lat);

    final double sl = StrictMath.sin(lat);
    final double s2 = sl*sl;
    final double cl = StrictMath.cos(lat);
    final double ge2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);

    if (ecf == null) {
      ecf = new double[3];
    }

    if (lat < -PI_OVER_2 && lat > -1.001D * PI_OVER_2) {
      lat = -PI_OVER_2;
    } else if (lat > PI_OVER_2 && lat < 1.001D * PI_OVER_2) {
      lat = PI_OVER_2;
    }
    assert (lat >= -PI_OVER_2) || (lat <= PI_OVER_2);

    if (lon > StrictMath.PI) {
      lon -= (2*StrictMath.PI);
    }

    final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - ge2 * s2);
    ecf[0] = (rn+alt) * cl * StrictMath.cos(lon);
    ecf[1] = (rn+alt) * cl * StrictMath.sin(lon);
    ecf[2] = ((rn*(1.0-ge2))+alt)*sl;

    return ecf;
  }

  /**
   * Converts from lat lon alt (in degrees) to East North Up right-hand coordinate system
   * @param lon longitude in degrees
   * @param lat latitude in degrees
   * @param alt altitude in meters
   * @param centerLon reference point longitude in degrees
   * @param centerLat reference point latitude in degrees
   * @param centerAlt reference point altitude in meters
   * @param enu result east, north, up coordinate
   * @return east, north, up coordinate
   */
  public static double[] llaToENU(final double lon, final double lat, final double alt, double centerLon,
                                  double centerLat, final double centerAlt, double[] enu) {
    if (enu == null) {
      enu = new double[3];
    }

    // convert point to ecf coordinates
    final double[] ecf = llaToECF(lon, lat, alt, null);

    // convert from ecf to enu
    return ecfToENU(ecf[0], ecf[1], ecf[2], centerLon, centerLat, centerAlt, enu);
  }

  /**
   * Converts from East North Up right-hand rule to lat lon alt in degrees
   * @param x easting (in meters)
   * @param y northing (in meters)
   * @param z up (in meters)
   * @param centerLon reference point longitude (in degrees)
   * @param centerLat reference point latitude (in degrees)
   * @param centerAlt reference point altitude (in meters)
   * @param lla resulting lat, lon, alt point (in degrees)
   * @return lat, lon, alt point (in degrees)
   */
  public static double[] enuToLLA(final double x, final double y, final double z, final double centerLon,
                                  final double centerLat, final double centerAlt, double[] lla) {
    // convert enuToECF
    if (lla == null) {
      lla = new double[3];
    }

    // convert enuToECF, storing intermediate result in lla
    lla = enuToECF(x, y, z, centerLon, centerLat, centerAlt, lla);

    // convert ecf to LLA
    return ecfToLLA(lla[0], lla[1], lla[2], lla);
  }

  /**
   * Convert from Earth-Centered-Fixed to Easting, Northing, Up Right Hand System
   * @param x ECF X coordinate (in meters)
   * @param y ECF Y coordinate (in meters)
   * @param z ECF Z coordinate (in meters)
   * @param centerLon ENU origin longitude (in degrees)
   * @param centerLat ENU origin latitude (in degrees)
   * @param centerAlt ENU altitude (in meters)
   * @param enu reusable enu result
   * @return Easting, Northing, Up coordinate
   */
  public static double[] ecfToENU(double x, double y, double z, final double centerLon,
                                  final double centerLat, final double centerAlt, double[] enu) {
    if (enu == null) {
      enu = new double[3];
    }

    // create rotation matrix and rotate to enu orientation
    final double[][] phi = createPhiTransform(centerLon, centerLat, null);

    // convert origin to ENU
    final double[] originECF = llaToECF(centerLon, centerLat, centerAlt, null);
    final double[] originENU = new double[3];
    originENU[0] = ((phi[0][0] * originECF[0]) + (phi[0][1] * originECF[1]) + (phi[0][2] * originECF[2]));
    originENU[1] = ((phi[1][0] * originECF[0]) + (phi[1][1] * originECF[1]) + (phi[1][2] * originECF[2]));
    originENU[2] = ((phi[2][0] * originECF[0]) + (phi[2][1] * originECF[1]) + (phi[2][2] * originECF[2]));

    // rotate then translate
    enu[0] = ((phi[0][0] * x) + (phi[0][1] * y) + (phi[0][2] * z)) - originENU[0];
    enu[1] = ((phi[1][0] * x) + (phi[1][1] * y) + (phi[1][2] * z)) - originENU[1];
    enu[2] = ((phi[2][0] * x) + (phi[2][1] * y) + (phi[2][2] * z)) - originENU[2];

    return enu;
  }

  /**
   * Convert from Easting, Northing, Up Right-Handed system to Earth Centered Fixed system
   * @param x ENU x coordinate (in meters)
   * @param y ENU y coordinate (in meters)
   * @param z ENU z coordinate (in meters)
   * @param centerLon ENU origin longitude (in degrees)
   * @param centerLat ENU origin latitude (in degrees)
   * @param centerAlt ENU origin altitude (in meters)
   * @param ecf reusable ecf result
   * @return ecf result coordinate
   */
  public static double[] enuToECF(final double x, final double y, final double z, double centerLon,
                                  double centerLat, final double centerAlt, double[] ecf) {
    if (ecf == null) {
      ecf = new double[3];
    }

    double[][] phi = createTransposedPhiTransform(centerLon, centerLat, null);
    double[] ecfOrigin = llaToECF(centerLon, centerLat, centerAlt, null);

    // rotate and translate
    ecf[0] = (phi[0][0]*x + phi[0][1]*y + phi[0][2]*z) + ecfOrigin[0];
    ecf[1] = (phi[1][0]*x + phi[1][1]*y + phi[1][2]*z) + ecfOrigin[1];
    ecf[2] = (phi[2][0]*x + phi[2][1]*y + phi[2][2]*z) + ecfOrigin[2];

    return ecf;
  }

  /**
   * Create the rotation matrix for converting Earth Centered Fixed to Easting Northing Up
   * @param originLon ENU origin longitude (in degrees)
   * @param originLat ENU origin latitude (in degrees)
   * @param phiMatrix reusable phi matrix result
   * @return phi rotation matrix
   */
  private static double[][] createPhiTransform(double originLon, double originLat, double[][] phiMatrix) {

    if (phiMatrix == null) {
      phiMatrix = new double[3][3];
    }

    originLon = StrictMath.toRadians(originLon);
    originLat = StrictMath.toRadians(originLat);

    final double sLon = StrictMath.sin(originLon);
    final double cLon = StrictMath.cos(originLon);
    final double sLat = StrictMath.sin(originLat);
    final double cLat = StrictMath.cos(originLat);

    phiMatrix[0][0] = -sLon;
    phiMatrix[0][1] = cLon;
    phiMatrix[0][2] = 0.0D;
    phiMatrix[1][0] = -sLat * cLon;
    phiMatrix[1][1] = -sLat * sLon;
    phiMatrix[1][2] = cLat;
    phiMatrix[2][0] = cLat * cLon;
    phiMatrix[2][1] = cLat * sLon;
    phiMatrix[2][2] = sLat;

    return phiMatrix;
  }

  /**
   * Create the transposed rotation matrix for converting Easting Northing Up coordinates to Earth Centered Fixed
   * @param originLon ENU origin longitude (in degrees)
   * @param originLat ENU origin latitude (in degrees)
   * @param phiMatrix reusable phi rotation matrix result
   * @return transposed phi rotation matrix
   */
  private static double[][] createTransposedPhiTransform(double originLon, double originLat, double[][] phiMatrix) {

    if (phiMatrix == null) {
      phiMatrix = new double[3][3];
    }

    originLon = StrictMath.toRadians(originLon);
    originLat = StrictMath.toRadians(originLat);

    final double sLat = StrictMath.sin(originLat);
    final double cLat = StrictMath.cos(originLat);
    final double sLon = StrictMath.sin(originLon);
    final double cLon = StrictMath.cos(originLon);

    phiMatrix[0][0] = -sLon;
    phiMatrix[1][0] = cLon;
    phiMatrix[2][0] = 0.0D;
    phiMatrix[0][1] = -sLat * cLon;
    phiMatrix[1][1] = -sLat * sLon;
    phiMatrix[2][1] = cLat;
    phiMatrix[0][2] = cLat * cLon;
    phiMatrix[1][2] = cLat * sLon;
    phiMatrix[2][2] = sLat;

    return phiMatrix;
  }

  /**
   * Finds a point along a bearing from a given lon,lat geolocation using Vincenty's distance formula
   *
   * @param lon origin longitude in degrees
   * @param lat origin latitude in degrees
   * @param bearing azimuthal bearing in degrees
   * @param dist distance in meters
   * @param pt resulting point
   * @return the point along a bearing at a given distance in meters
   */
  public static final double[] pointFromLonLatBearing(double lon, double lat, double bearing, double dist, double[] pt) {

    if (pt == null) {
      pt = new double[2];
    }

    final double alpha1 = StrictMath.toRadians(bearing);
    final double cosA1 = StrictMath.cos(alpha1);
    final double sinA1 = StrictMath.sin(alpha1);
    final double tanU1 = (1-FLATTENING) * StrictMath.tan(StrictMath.toRadians(lat));
    final double cosU1 = 1 / StrictMath.sqrt((1+tanU1*tanU1));
    final double sinU1 = tanU1*cosU1;
    final double sig1 = StrictMath.atan2(tanU1, cosA1);
    final double sinAlpha = cosU1 * sinA1;
    final double cosSqAlpha = 1 - sinAlpha*sinAlpha;
    final double uSq = cosSqAlpha * (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2) / SEMIMINOR_AXIS2;
    final double A = 1 + uSq/16384D*(4096D + uSq * (-768D + uSq * (320D - 175D*uSq)));
    final double B = uSq/1024D * (256D + uSq * (-128D + uSq * (74D - 47D * uSq)));

    double sigma = dist / (SEMIMINOR_AXIS*A);
    double sigmaP;
    double sinSigma, cosSigma, cos2SigmaM, deltaSigma;

    do {
      cos2SigmaM = StrictMath.cos(2*sig1 + sigma);
      sinSigma = StrictMath.sin(sigma);
      cosSigma = StrictMath.cos(sigma);

      deltaSigma = B * sinSigma * (cos2SigmaM + (B/4D) * (cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)-
          (B/6) * cos2SigmaM*(-3+4*sinSigma*sinSigma)*(-3+4*cos2SigmaM*cos2SigmaM)));
      sigmaP = sigma;
      sigma = dist / (SEMIMINOR_AXIS*A) + deltaSigma;
    } while (StrictMath.abs(sigma-sigmaP) > 1E-12);

    final double tmp = sinU1*sinSigma - cosU1*cosSigma*cosA1;
    final double lat2 = StrictMath.atan2(sinU1*cosSigma + cosU1*sinSigma*cosA1,
        (1-FLATTENING) * StrictMath.sqrt(sinAlpha*sinAlpha + tmp*tmp));
    final double lambda = StrictMath.atan2(sinSigma*sinA1, cosU1*cosSigma - sinU1*sinSigma*cosA1);
    final double c = FLATTENING/16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha));

    final double lam = lambda - (1-c) * FLATTENING * sinAlpha *
        (sigma + c * sinSigma * (cos2SigmaM + c * cosSigma * (-1 + 2* cos2SigmaM*cos2SigmaM)));
    pt[0] = lon + StrictMath.toDegrees(lam);
    pt[1] = StrictMath.toDegrees(lat2);

    return pt;
  }
}
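Aside: the ECF/LLA conversions above are designed to round-trip; a minimal sketch (coordinates chosen arbitrarily), assuming the class above is on the classpath:

import org.apache.lucene.util.XGeoProjectionUtils;

public class ProjectionDemo {
    public static void main(String[] args) {
        // lon/lat/alt (degrees, meters) -> earth-centered earth-fixed (meters) -> back again.
        double[] ecf = XGeoProjectionUtils.llaToECF(2.2945, 48.8584, 35.0, null);
        double[] lla = XGeoProjectionUtils.ecfToLLA(ecf[0], ecf[1], ecf[2], null);
        // lla should match the inputs to within a small tolerance.
        System.out.printf("lon=%.4f lat=%.4f alt=%.1f%n", lla[0], lla[1], lla[2]);
    }
}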
core/src/main/java/org/apache/lucene/util/XGeoUtils.java (new file, 429 lines)
@@ -0,0 +1,429 @@
package org.apache.lucene.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.ArrayList;

/**
 * Basic reusable geo-spatial utility methods
 *
 * @lucene.experimental
 */
public final class XGeoUtils {
  private static final short MIN_LON = -180;
  private static final short MIN_LAT = -90;
  public static final short BITS = 31;
  private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
  private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
  public static final double TOLERANCE = 1E-6;

  /** Minimum longitude value. */
  public static final double MIN_LON_INCL = -180.0D;

  /** Maximum longitude value. */
  public static final double MAX_LON_INCL = 180.0D;

  /** Minimum latitude value. */
  public static final double MIN_LAT_INCL = -90.0D;

  /** Maximum latitude value. */
  public static final double MAX_LAT_INCL = 90.0D;

  // magic numbers for bit interleaving
  private static final long MAGIC[] = {
      0x5555555555555555L, 0x3333333333333333L,
      0x0F0F0F0F0F0F0F0FL, 0x00FF00FF00FF00FFL,
      0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL,
      0xAAAAAAAAAAAAAAAAL
  };
  // shift values for bit interleaving
  private static final short SHIFT[] = {1, 2, 4, 8, 16};

  public static double LOG2 = StrictMath.log(2);

  // No instance:
  private XGeoUtils() {
  }

  public static Long mortonHash(final double lon, final double lat) {
    return interleave(scaleLon(lon), scaleLat(lat));
  }

  public static double mortonUnhashLon(final long hash) {
    return unscaleLon(deinterleave(hash));
  }

  public static double mortonUnhashLat(final long hash) {
    return unscaleLat(deinterleave(hash >>> 1));
  }

  private static long scaleLon(final double val) {
    return (long) ((val-MIN_LON) * LON_SCALE);
  }

  private static long scaleLat(final double val) {
    return (long) ((val-MIN_LAT) * LAT_SCALE);
  }

  private static double unscaleLon(final long val) {
    return (val / LON_SCALE) + MIN_LON;
  }

  private static double unscaleLat(final long val) {
    return (val / LAT_SCALE) + MIN_LAT;
  }

  /**
   * Interleaves the first 32 bits of each long value
   *
   * Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
   */
  public static long interleave(long v1, long v2) {
    v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
    v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
    v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
    v1 = (v1 | (v1 << SHIFT[1])) & MAGIC[1];
    v1 = (v1 | (v1 << SHIFT[0])) & MAGIC[0];
    v2 = (v2 | (v2 << SHIFT[4])) & MAGIC[4];
    v2 = (v2 | (v2 << SHIFT[3])) & MAGIC[3];
    v2 = (v2 | (v2 << SHIFT[2])) & MAGIC[2];
    v2 = (v2 | (v2 << SHIFT[1])) & MAGIC[1];
    v2 = (v2 | (v2 << SHIFT[0])) & MAGIC[0];

    return (v2<<1) | v1;
  }

  /**
   * Deinterleaves long value back to two concatenated 32-bit values
   */
  public static long deinterleave(long b) {
    b &= MAGIC[0];
    b = (b ^ (b >>> SHIFT[0])) & MAGIC[1];
    b = (b ^ (b >>> SHIFT[1])) & MAGIC[2];
    b = (b ^ (b >>> SHIFT[2])) & MAGIC[3];
    b = (b ^ (b >>> SHIFT[3])) & MAGIC[4];
    b = (b ^ (b >>> SHIFT[4])) & MAGIC[5];
    return b;
  }

  public static double compare(final double v1, final double v2) {
    final double compare = v1-v2;
    return Math.abs(compare) <= TOLERANCE ? 0 : compare;
  }

  /**
   * Puts longitude in range of -180 to +180.
   */
  public static double normalizeLon(double lon_deg) {
    if (lon_deg >= -180 && lon_deg <= 180) {
      return lon_deg; //common case, and avoids slight double precision shifting
    }
    double off = (lon_deg + 180) % 360;
    if (off < 0) {
      return 180 + off;
    } else if (off == 0 && lon_deg > 0) {
      return 180;
    } else {
      return -180 + off;
    }
  }

  /**
   * Puts latitude in range of -90 to 90.
   */
  public static double normalizeLat(double lat_deg) {
    if (lat_deg >= -90 && lat_deg <= 90) {
      return lat_deg; //common case, and avoids slight double precision shifting
    }
    double off = Math.abs((lat_deg + 90) % 360);
    return (off <= 180 ? off : 360-off) - 90;
  }

  public static final boolean bboxContains(final double lon, final double lat, final double minLon,
                                           final double minLat, final double maxLon, final double maxLat) {
    return (compare(lon, minLon) >= 0 && compare(lon, maxLon) <= 0
        && compare(lat, minLat) >= 0 && compare(lat, maxLat) <= 0);
  }

  /**
   * simple even-odd point in polygon computation
   * 1. Determine if point is contained in the longitudinal range
   * 2. Determine whether point crosses the edge by computing the latitudinal delta
   *    between the end-point of a parallel vector (originating at the point) and the
   *    y-component of the edge sink
   *
   * NOTE: Requires polygon point (x,y) order either clockwise or counter-clockwise
   */
  public static boolean pointInPolygon(double[] x, double[] y, double lat, double lon) {
    assert x.length == y.length;
    boolean inPoly = false;
    /**
     * Note: This is using a euclidean coordinate system which could result in
     * upwards of 110KM error at the equator.
     * TODO convert coordinates to cylindrical projection (e.g. mercator)
     */
    for (int i = 1; i < x.length; i++) {
      if (x[i] < lon && x[i-1] >= lon || x[i-1] < lon && x[i] >= lon) {
        if (y[i] + (lon - x[i]) / (x[i-1] - x[i]) * (y[i-1] - y[i]) < lat) {
          inPoly = !inPoly;
        }
      }
    }
    return inPoly;
  }

  public static String geoTermToString(long term) {
    StringBuilder s = new StringBuilder(64);
    final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
    for (int i = 0; i < numberOfLeadingZeros; i++) {
      s.append('0');
    }
    if (term != 0) {
      s.append(Long.toBinaryString(term));
    }
    return s.toString();
  }

  public static boolean rectDisjoint(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                     final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
  }

  /**
   * Computes whether a rectangle is wholly within another rectangle (shared boundaries allowed)
   */
  public static boolean rectWithin(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                   final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY);
  }

  public static boolean rectCrosses(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                    final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(rectDisjoint(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY) ||
        rectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY));
  }

  /**
   * Computes whether rectangle a contains rectangle b (touching allowed)
   */
  public static boolean rectContains(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                     final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(bMinX < aMinX || bMinY < aMinY || bMaxX > aMaxX || bMaxY > aMaxY);
  }

  /**
   * Computes whether a rectangle intersects another rectangle (crosses, within, touching, etc)
   */
  public static boolean rectIntersects(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                       final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !((aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) );
  }

  /**
   * Computes whether a rectangle crosses a shape. (touching not allowed)
   */
  public static boolean rectCrossesPoly(final double rMinX, final double rMinY, final double rMaxX,
                                        final double rMaxY, final double[] shapeX, final double[] shapeY,
                                        final double sMinX, final double sMinY, final double sMaxX,
                                        final double sMaxY) {
    // short-circuit: if the bounding boxes are disjoint then the shape does not cross
    if (rectDisjoint(rMinX, rMinY, rMaxX, rMaxY, sMinX, sMinY, sMaxX, sMaxY)) {
      return false;
    }

    final double[][] bbox = new double[][] { {rMinX, rMinY}, {rMaxX, rMinY}, {rMaxX, rMaxY}, {rMinX, rMaxY}, {rMinX, rMinY} };
    final int polyLength = shapeX.length-1;
    double d, s, t, a1, b1, c1, a2, b2, c2;
    double x00, y00, x01, y01, x10, y10, x11, y11;

    // computes the intersection point between each bbox edge and the polygon edge
    for (short b=0; b<4; ++b) {
      a1 = bbox[b+1][1]-bbox[b][1];
      b1 = bbox[b][0]-bbox[b+1][0];
      c1 = a1*bbox[b+1][0] + b1*bbox[b+1][1];
      for (int p=0; p<polyLength; ++p) {
        a2 = shapeY[p+1]-shapeY[p];
        b2 = shapeX[p]-shapeX[p+1];
        // compute determinant
        d = a1*b2 - a2*b1;
        if (d != 0) {
          // lines are not parallel, check intersecting points
          c2 = a2*shapeX[p+1] + b2*shapeY[p+1];
          s = (1/d)*(b2*c1 - b1*c2);
          t = (1/d)*(a1*c2 - a2*c1);
          x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - TOLERANCE;
          x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + TOLERANCE;
          y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - TOLERANCE;
          y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + TOLERANCE;
          x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - TOLERANCE;
          x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + TOLERANCE;
          y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - TOLERANCE;
          y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + TOLERANCE;
          // check whether the intersection point is touching one of the line segments
          boolean touching = ((x00 == s && y00 == t) || (x01 == s && y01 == t))
              || ((x10 == s && y10 == t) || (x11 == s && y11 == t));
          // if line segments are not touching and the intersection point is within the range of either segment
          if (!(touching || x00 > s || x01 < s || y00 > t || y01 < t || x10 > s || x11 < s || y10 > t || y11 < t)) {
            return true;
          }
        }
      } // for each poly edge
    } // for each bbox edge
    return false;
  }

  /**
   * Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
   *
   * @param lon longitudinal center of circle (in degrees)
   * @param lat latitudinal center of circle (in degrees)
   * @param radius distance radius of circle (in meters)
   * @return a list of lon/lat points representing the circle
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radius) {
    double angle;
    // a little under-sampling (to limit the number of polygonal points): using Archimedes' estimation of pi
    final int sides = 25;
    ArrayList<double[]> geometry = new ArrayList();
    double[] lons = new double[sides];
    double[] lats = new double[sides];

    double[] pt = new double[2];
    final int sidesLen = sides-1;
    for (int i=0; i<sidesLen; ++i) {
      angle = (i*360/sides);
      pt = XGeoProjectionUtils.pointFromLonLatBearing(lon, lat, angle, radius, pt);
      lons[i] = pt[0];
      lats[i] = pt[1];
    }
    // close the poly
    lons[sidesLen] = lons[0];
    lats[sidesLen] = lats[0];
    geometry.add(lons);
    geometry.add(lats);

    return geometry;
  }

  /**
   * Computes whether a rectangle is within a given polygon (shared boundaries allowed)
   */
  public static boolean rectWithinPoly(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                       final double[] shapeX, final double[] shapeY, final double sMinX,
                                       final double sMinY, final double sMaxX, final double sMaxY) {
    // check if rectangle crosses poly (to handle concave/pacman polys), then check that all 4 corners
    // are contained
    return !(rectCrossesPoly(rMinX, rMinY, rMaxX, rMaxY, shapeX, shapeY, sMinX, sMinY, sMaxX, sMaxY) ||
        !pointInPolygon(shapeX, shapeY, rMinY, rMinX) || !pointInPolygon(shapeX, shapeY, rMinY, rMaxX) ||
        !pointInPolygon(shapeX, shapeY, rMaxY, rMaxX) || !pointInPolygon(shapeX, shapeY, rMaxY, rMinX));
  }

  private static boolean rectAnyCornersOutsideCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                                     final double centerLon, final double centerLat, final double radius) {
    return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 > radius);
  }

  private static boolean rectAnyCornersInCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                                final double centerLon, final double centerLat, final double radius) {
    return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 <= radius);
  }

  public static boolean rectWithinCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                         final double centerLon, final double centerLat, final double radius) {
    return !(rectAnyCornersOutsideCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius));
  }

  /**
   * Computes whether a rectangle crosses a circle
   */
  public static boolean rectCrossesCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                          final double centerLon, final double centerLat, final double radius) {
    return rectAnyCornersInCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius)
        || lineCrossesSphere(rMinX, rMinY, 0, rMaxX, rMinY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMaxX, rMinY, 0, rMaxX, rMaxY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMaxX, rMaxY, 0, rMinX, rMaxY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMinX, rMaxY, 0, rMinX, rMinY, 0, centerLon, centerLat, 0, radius);
  }

  /**
   * Computes whether a 3-dimensional line segment intersects or crosses a sphere
   *
   * @param lon1 longitudinal location of the line segment start point (in degrees)
   * @param lat1 latitudinal location of the line segment start point (in degrees)
   * @param alt1 altitude of the line segment start point (in meters)
   * @param lon2 longitudinal location of the line segment end point (in degrees)
   * @param lat2 latitudinal location of the line segment end point (in degrees)
   * @param alt2 altitude of the line segment end point (in meters)
   * @param centerLon longitudinal location of center search point (in degrees)
   * @param centerLat latitudinal location of center search point (in degrees)
   * @param centerAlt altitude of the center point (in meters)
   * @param radius search sphere radius (in meters)
   * @return whether the provided line segment is a secant of the sphere
   */
  private static boolean lineCrossesSphere(double lon1, double lat1, double alt1, double lon2,
                                           double lat2, double alt2, double centerLon, double centerLat,
                                           double centerAlt, double radius) {
    // convert to cartesian 3d (in meters)
    double[] ecf1 = XGeoProjectionUtils.llaToECF(lon1, lat1, alt1, null);
    double[] ecf2 = XGeoProjectionUtils.llaToECF(lon2, lat2, alt2, null);
    double[] cntr = XGeoProjectionUtils.llaToECF(centerLon, centerLat, centerAlt, null);

    final double dX = ecf2[0] - ecf1[0];
    final double dY = ecf2[1] - ecf1[1];
    final double dZ = ecf2[2] - ecf1[2];
    final double fX = ecf1[0] - cntr[0];
    final double fY = ecf1[1] - cntr[1];
    final double fZ = ecf1[2] - cntr[2];

    final double a = dX*dX + dY*dY + dZ*dZ;
    final double b = 2 * (fX*dX + fY*dY + fZ*dZ);
    final double c = (fX*fX + fY*fY + fZ*fZ) - (radius*radius);

    double discrim = (b*b)-(4*a*c);
    if (discrim < 0) {
      return false;
    }

    discrim = StrictMath.sqrt(discrim);
    final double a2 = 2*a;
    final double t1 = (-b - discrim)/a2;
    final double t2 = (-b + discrim)/a2;

    if ( (t1 < 0 || t1 > 1) ) {
      return !(t2 < 0 || t2 > 1);
    }

    return true;
  }

  public static boolean isValidLat(double lat) {
    return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
  }

  public static boolean isValidLon(double lon) {
    return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
  }
}
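Aside: a short sketch of the morton hash round trip and the normalization helpers above, assuming XGeoUtils is on the classpath (values are quantized to 31 bits per dimension, so expect roughly 1e-7 degrees of error):

import org.apache.lucene.util.XGeoUtils;

public class MortonDemo {
    public static void main(String[] args) {
        // Interleave scaled lon/lat into one sortable long, then recover them.
        long hash = XGeoUtils.mortonHash(-73.97, 40.78);
        System.out.printf("lon=%.6f lat=%.6f%n",
            XGeoUtils.mortonUnhashLon(hash), XGeoUtils.mortonUnhashLat(hash));

        System.out.println(XGeoUtils.normalizeLon(370.0)); // 10.0
        System.out.println(XGeoUtils.normalizeLat(100.0)); // 80.0
    }
}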
@@ -30,7 +30,7 @@ import java.io.IOException;
  */
 public abstract class ActionRequest<T extends ActionRequest> extends TransportRequest {
 
-    protected ActionRequest() {
+    public ActionRequest() {
         super();
     }
 
@@ -47,7 +47,7 @@ public class ClusterHealthRequest extends MasterNodeReadRequest<ClusterHealthReq
     private String waitForNodes = "";
     private Priority waitForEvents = null;
 
-    ClusterHealthRequest() {
+    public ClusterHealthRequest() {
     }
 
     public ClusterHealthRequest(String... indices) {
@@ -124,7 +124,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
         if (request.waitForNodes().isEmpty()) {
             waitFor--;
         }
-        if (request.indices().length == 0) { // check that they actually exists in the meta data
+        if (request.indices() == null || request.indices().length == 0) { // check that they actually exists in the meta data
             waitFor--;
         }
 
@@ -199,7 +199,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
         if (request.waitForActiveShards() != -1 && response.getActiveShards() >= request.waitForActiveShards()) {
             waitForCounter++;
         }
-        if (request.indices().length > 0) {
+        if (request.indices() != null && request.indices().length > 0) {
             try {
                 indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), request.indices());
                 waitForCounter++;
@@ -38,7 +38,7 @@ public class NodesHotThreadsRequest extends BaseNodesRequest<NodesHotThreadsRequ
     boolean ignoreIdleThreads = true;
 
     // for serialization
-    NodesHotThreadsRequest() {
+    public NodesHotThreadsRequest() {
 
     }
 
@@ -94,11 +94,11 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction<NodesHo
         return false;
     }
 
-    static class NodeRequest extends BaseNodeRequest {
+    public static class NodeRequest extends BaseNodeRequest {
 
         NodesHotThreadsRequest request;
 
-        NodeRequest() {
+        public NodeRequest() {
         }
 
         NodeRequest(String nodeId, NodesHotThreadsRequest request) {
@@ -88,11 +88,11 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequ
         return false;
     }
 
-    static class NodeInfoRequest extends BaseNodeRequest {
+    public static class NodeInfoRequest extends BaseNodeRequest {
 
         NodesInfoRequest request;
 
-        NodeInfoRequest() {
+        public NodeInfoRequest() {
         }
 
         NodeInfoRequest(String nodeId, NodesInfoRequest request) {
@@ -42,7 +42,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
     private boolean breaker;
     private boolean script;
 
-    protected NodesStatsRequest() {
+    public NodesStatsRequest() {
     }
 
     /**
@@ -88,11 +88,11 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe
         return false;
     }
 
-    static class NodeStatsRequest extends BaseNodeRequest {
+    public static class NodeStatsRequest extends BaseNodeRequest {
 
         NodesStatsRequest request;
 
-        NodeStatsRequest() {
+        public NodeStatsRequest() {
         }
 
         NodeStatsRequest(String nodeId, NodesStatsRequest request) {
@@ -37,7 +37,7 @@ public class DeleteRepositoryRequest extends AcknowledgedRequest<DeleteRepositor
 
     private String name;
 
-    DeleteRepositoryRequest() {
+    public DeleteRepositoryRequest() {
     }
 
     /**
@@ -36,7 +36,7 @@ public class GetRepositoriesRequest extends MasterNodeReadRequest<GetRepositorie
 
     private String[] repositories = Strings.EMPTY_ARRAY;
 
-    GetRepositoriesRequest() {
+    public GetRepositoriesRequest() {
     }
 
     /**
@@ -55,7 +55,7 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
 
     private Settings settings = EMPTY_SETTINGS;
 
-    PutRepositoryRequest() {
+    public PutRepositoryRequest() {
     }
 
     /**
@@ -37,7 +37,7 @@ public class VerifyRepositoryRequest extends AcknowledgedRequest<VerifyRepositor
 
     private String name;
 
-    VerifyRepositoryRequest() {
+    public VerifyRepositoryRequest() {
    }
 
     /**
@@ -79,7 +79,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
 
     private boolean waitForCompletion;
 
-    CreateSnapshotRequest() {
+    public CreateSnapshotRequest() {
     }
 
     /**
@@ -41,7 +41,7 @@ public class GetSnapshotsRequest extends MasterNodeRequest<GetSnapshotsRequest>
 
     private String[] snapshots = Strings.EMPTY_ARRAY;
 
-    GetSnapshotsRequest() {
+    public GetSnapshotsRequest() {
     }
 
     /**
@@ -64,7 +64,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
     private Settings indexSettings = EMPTY_SETTINGS;
     private String[] ignoreIndexSettings = Strings.EMPTY_ARRAY;
 
-    RestoreSnapshotRequest() {
+    public RestoreSnapshotRequest() {
     }
 
     /**
@@ -537,7 +537,9 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
                     throw new IllegalArgumentException("malformed ignore_index_settings section, should be an array of strings");
                 }
             } else {
-                throw new IllegalArgumentException("Unknown parameter " + name);
+                if (IndicesOptions.isIndicesOptions(name) == false) {
+                    throw new IllegalArgumentException("Unknown parameter " + name);
+                }
             }
         }
         indicesOptions(IndicesOptions.fromMap((Map<String, Object>) source, IndicesOptions.lenientExpandOpen()));
@@ -137,7 +137,7 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<Transpor
         return true;
     }
 
-    static class Request extends BaseNodesRequest<Request> {
+    public static class Request extends BaseNodesRequest<Request> {
 
         private SnapshotId[] snapshotIds;
 
@@ -203,11 +203,11 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction<Transpor
     }
 
 
-    static class NodeRequest extends BaseNodeRequest {
+    public static class NodeRequest extends BaseNodeRequest {
 
         private SnapshotId[] snapshotIds;
 
-        NodeRequest() {
+        public NodeRequest() {
         }
 
         NodeRequest(String nodeId, TransportNodesSnapshotsStatus.Request request) {
@@ -30,7 +30,7 @@ import java.io.IOException;
  */
 public class ClusterStatsRequest extends BaseNodesRequest<ClusterStatsRequest> {
 
-    ClusterStatsRequest() {
+    public ClusterStatsRequest() {
     }
 
     /**
@@ -145,11 +145,11 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
         return false;
     }
 
-    static class ClusterStatsNodeRequest extends BaseNodeRequest {
+    public static class ClusterStatsNodeRequest extends BaseNodeRequest {
 
         ClusterStatsRequest request;
 
-        ClusterStatsNodeRequest() {
+        public ClusterStatsNodeRequest() {
         }
 
         ClusterStatsNodeRequest(String nodeId, ClusterStatsRequest request) {
@@ -37,7 +37,7 @@ public class ClearIndicesCacheRequest extends BroadcastRequest<ClearIndicesCache
     private String[] fields = null;
 
 
-    ClearIndicesCacheRequest() {
+    public ClearIndicesCacheRequest() {
     }
 
     public ClearIndicesCacheRequest(String... indices) {
@@ -39,7 +39,7 @@ public class CloseIndexRequest extends AcknowledgedRequest<CloseIndexRequest> im
     private String[] indices;
     private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false);
 
-    CloseIndexRequest() {
+    public CloseIndexRequest() {
     }
 
     /**
@@ -78,7 +78,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
 
     private boolean updateAllTypes = false;
 
-    CreateIndexRequest() {
+    public CreateIndexRequest() {
     }
 
     /**
@@ -44,7 +44,7 @@ public class DeleteIndexRequest extends MasterNodeRequest<DeleteIndexRequest> im
     private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true);
     private TimeValue timeout = AcknowledgedRequest.DEFAULT_ACK_TIMEOUT;
 
-    DeleteIndexRequest() {
+    public DeleteIndexRequest() {
     }
 
     /**
@@ -37,7 +37,7 @@ public class IndicesExistsRequest extends MasterNodeReadRequest<IndicesExistsReq
     private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true);
 
     // for serialization
-    IndicesExistsRequest() {
+    public IndicesExistsRequest() {
 
     }
 
@@ -38,7 +38,7 @@ public class TypesExistsRequest extends MasterNodeReadRequest<TypesExistsRequest
 
     private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen();
 
-    TypesExistsRequest() {
+    public TypesExistsRequest() {
     }
 
     public TypesExistsRequest(String[] indices, String... types) {
@@ -42,7 +42,7 @@ public class FlushRequest extends BroadcastRequest<FlushRequest> {
     private boolean force = false;
     private boolean waitIfOngoing = false;
 
-    FlushRequest() {
+    public FlushRequest() {
     }
 
     /**
@ -29,7 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMappingsIndexRequest> {
|
||||
public class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMappingsIndexRequest> {
|
||||
|
||||
private boolean probablySingleFieldRequest;
|
||||
private boolean includeDefaults;
|
||||
@ -38,7 +38,7 @@ class GetFieldMappingsIndexRequest extends SingleShardRequest<GetFieldMappingsIn
|
||||
|
||||
private OriginalIndices originalIndices;
|
||||
|
||||
GetFieldMappingsIndexRequest() {
|
||||
public GetFieldMappingsIndexRequest() {
|
||||
}
|
||||
|
||||
GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index, boolean probablySingleFieldRequest) {
|
||||
|
@ -65,7 +65,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
|
||||
|
||||
private boolean updateAllTypes = false;
|
||||
|
||||
PutMappingRequest() {
|
||||
public PutMappingRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -39,7 +39,7 @@ public class OpenIndexRequest extends AcknowledgedRequest<OpenIndexRequest> impl
|
||||
private String[] indices;
|
||||
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, false, true);
|
||||
|
||||
OpenIndexRequest() {
|
||||
public OpenIndexRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -33,7 +33,7 @@ import org.elasticsearch.action.support.broadcast.BroadcastRequest;
|
||||
*/
|
||||
public class RefreshRequest extends BroadcastRequest<RefreshRequest> {
|
||||
|
||||
RefreshRequest() {
|
||||
public RefreshRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -48,7 +48,7 @@ public class UpdateSettingsRequest extends AcknowledgedRequest<UpdateSettingsReq
|
||||
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true);
|
||||
private Settings settings = EMPTY_SETTINGS;
|
||||
|
||||
UpdateSettingsRequest() {
|
||||
public UpdateSettingsRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -46,7 +46,7 @@ public class IndicesShardStoresRequest extends MasterNodeReadRequest<IndicesShar
|
||||
this.indices = indices;
|
||||
}
|
||||
|
||||
IndicesShardStoresRequest() {
|
||||
public IndicesShardStoresRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -74,7 +74,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
|
||||
|
||||
private Map<String, IndexMetaData.Custom> customs = new HashMap<>();
|
||||
|
||||
PutIndexTemplateRequest() {
|
||||
public PutIndexTemplateRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -30,11 +30,11 @@ import java.io.IOException;
|
||||
/**
|
||||
*
|
||||
*/
|
||||
final class ShardUpgradeRequest extends BroadcastShardRequest {
|
||||
public final class ShardUpgradeRequest extends BroadcastShardRequest {
|
||||
|
||||
private UpgradeRequest request = new UpgradeRequest();
|
||||
|
||||
ShardUpgradeRequest() {
|
||||
public ShardUpgradeRequest() {
|
||||
}
|
||||
|
||||
ShardUpgradeRequest(ShardId shardId, UpgradeRequest request) {
|
||||
|
@ -39,7 +39,7 @@ public class UpgradeSettingsRequest extends AcknowledgedRequest<UpgradeSettingsR
|
||||
|
||||
private Map<String, Tuple<Version, String>> versions;
|
||||
|
||||
UpgradeSettingsRequest() {
|
||||
public UpgradeSettingsRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -32,7 +32,7 @@ import java.io.IOException;
|
||||
/**
|
||||
* Internal validate request executed directly against a specific index shard.
|
||||
*/
|
||||
class ShardValidateQueryRequest extends BroadcastShardRequest {
|
||||
public class ShardValidateQueryRequest extends BroadcastShardRequest {
|
||||
|
||||
private BytesReference source;
|
||||
private String[] types = Strings.EMPTY_ARRAY;
|
||||
@ -43,7 +43,7 @@ class ShardValidateQueryRequest extends BroadcastShardRequest {
|
||||
@Nullable
|
||||
private String[] filteringAliases;
|
||||
|
||||
ShardValidateQueryRequest() {
|
||||
public ShardValidateQueryRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -55,7 +55,7 @@ public class ValidateQueryRequest extends BroadcastRequest<ValidateQueryRequest>
|
||||
|
||||
long nowInMillis;
|
||||
|
||||
ValidateQueryRequest() {
|
||||
public ValidateQueryRequest() {
|
||||
this(Strings.EMPTY_ARRAY);
|
||||
}
|
||||
|
||||
|
@ -38,7 +38,7 @@ public class TransportRenderSearchTemplateAction extends HandledTransportAction<
|
||||
private final ScriptService scriptService;
|
||||
|
||||
@Inject
|
||||
protected TransportRenderSearchTemplateAction(ScriptService scriptService, Settings settings, ThreadPool threadPool,
|
||||
public TransportRenderSearchTemplateAction(ScriptService scriptService, Settings settings, ThreadPool threadPool,
|
||||
TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
|
||||
super(settings, RenderSearchTemplateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, RenderSearchTemplateRequest.class);
|
||||
this.scriptService = scriptService;
|
||||
|
@ -42,7 +42,7 @@ public class DeleteWarmerRequest extends AcknowledgedRequest<DeleteWarmerRequest
|
||||
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false);
|
||||
private String[] indices = Strings.EMPTY_ARRAY;
|
||||
|
||||
DeleteWarmerRequest() {
|
||||
public DeleteWarmerRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -44,7 +44,7 @@ public class PutWarmerRequest extends AcknowledgedRequest<PutWarmerRequest> impl
|
||||
|
||||
private SearchRequest searchRequest;
|
||||
|
||||
PutWarmerRequest() {
|
||||
public PutWarmerRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -37,7 +37,7 @@ public class BulkShardRequest extends ReplicationRequest<BulkShardRequest> {
|
||||
|
||||
private boolean refresh;
|
||||
|
||||
BulkShardRequest() {
|
||||
public BulkShardRequest() {
|
||||
}
|
||||
|
||||
BulkShardRequest(BulkRequest bulkRequest, String index, int shardId, boolean refresh, BulkItemRequest[] items) {
|
||||
|
@ -55,7 +55,7 @@ public class ExistsRequest extends BroadcastRequest<ExistsRequest> {
|
||||
|
||||
long nowInMillis;
|
||||
|
||||
ExistsRequest() {
|
||||
public ExistsRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -29,7 +29,7 @@ import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
class ShardExistsRequest extends BroadcastShardRequest {
|
||||
public class ShardExistsRequest extends BroadcastShardRequest {
|
||||
|
||||
private float minScore;
|
||||
|
||||
@ -42,7 +42,7 @@ class ShardExistsRequest extends BroadcastShardRequest {
|
||||
@Nullable
|
||||
private String[] filteringAliases;
|
||||
|
||||
ShardExistsRequest() {
|
||||
public ShardExistsRequest() {
|
||||
}
|
||||
|
||||
ShardExistsRequest(ShardId shardId, @Nullable String[] filteringAliases, ExistsRequest request) {
|
||||
|
@ -49,7 +49,7 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
|
||||
|
||||
long nowInMillis;
|
||||
|
||||
ExplainRequest() {
|
||||
public ExplainRequest() {
|
||||
}
|
||||
|
||||
public ExplainRequest(String index, String type, String id) {
|
||||
|
@ -63,7 +63,7 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
|
||||
private long version = Versions.MATCH_ANY;
|
||||
private boolean ignoreErrorsOnGeneratedFields;
|
||||
|
||||
GetRequest() {
|
||||
public GetRequest() {
|
||||
type = "_all";
|
||||
}
|
||||
|
||||
|
@ -40,7 +40,7 @@ public class MultiGetShardRequest extends SingleShardRequest<MultiGetShardReques
|
||||
IntArrayList locations;
|
||||
List<MultiGetRequest.Item> items;
|
||||
|
||||
MultiGetShardRequest() {
|
||||
public MultiGetShardRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -39,7 +39,7 @@ public class PercolateShardRequest extends BroadcastShardRequest {
|
||||
private int numberOfShards;
|
||||
private long startTime;
|
||||
|
||||
PercolateShardRequest() {
|
||||
public PercolateShardRequest() {
|
||||
}
|
||||
|
||||
PercolateShardRequest(ShardId shardId, int numberOfShards, PercolateRequest request) {
|
||||
|
@ -118,7 +118,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
|
||||
private String preference;
|
||||
private List<Item> items;
|
||||
|
||||
Request() {
|
||||
public Request() {
|
||||
}
|
||||
|
||||
Request(MultiPercolateRequest multiPercolateRequest, String concreteIndex, int shardId, String preference) {
|
||||
|
@ -251,48 +251,6 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The source of the search request. Consider using either {@link #source(byte[])} or
|
||||
* {@link #source(org.elasticsearch.search.builder.SearchSourceBuilder)}.
|
||||
*/
|
||||
public SearchRequest source(String source) {
|
||||
this.source = new BytesArray(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The source of the search request in the form of a map.
|
||||
*/
|
||||
public SearchRequest source(Map source) {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
|
||||
builder.map(source);
|
||||
return source(builder);
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
public SearchRequest source(XContentBuilder builder) {
|
||||
this.source = builder.bytes();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The search source to execute.
|
||||
*/
|
||||
public SearchRequest source(byte[] source) {
|
||||
return source(source, 0, source.length);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The search source to execute.
|
||||
*/
|
||||
public SearchRequest source(byte[] source, int offset, int length) {
|
||||
return source(new BytesArray(source, offset, length));
|
||||
}
|
||||
|
||||
/**
|
||||
* The search source to execute.
|
||||
*/
|
||||
@ -301,6 +259,7 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The search source to execute.
|
||||
*/
|
||||
@ -327,51 +286,6 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchRequest extraSource(Map extraSource) {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
|
||||
builder.map(extraSource);
|
||||
return extraSource(builder);
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchGenerationException("Failed to generate [" + extraSource + "]", e);
|
||||
}
|
||||
}
|
||||
|
||||
public SearchRequest extraSource(XContentBuilder builder) {
|
||||
this.extraSource = builder.bytes();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to provide additional source that will be used as well.
|
||||
*/
|
||||
public SearchRequest extraSource(String source) {
|
||||
this.extraSource = new BytesArray(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to provide additional source that will be used as well.
|
||||
*/
|
||||
public SearchRequest extraSource(byte[] source) {
|
||||
return extraSource(source, 0, source.length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to provide additional source that will be used as well.
|
||||
*/
|
||||
public SearchRequest extraSource(byte[] source, int offset, int length) {
|
||||
return extraSource(new BytesArray(source, offset, length));
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to provide additional source that will be used as well.
|
||||
*/
|
||||
public SearchRequest extraSource(BytesReference source) {
|
||||
this.extraSource = source;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to provide template as source.
|
||||
*/
|
||||
|
@ -806,27 +806,19 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Note, setting anything other
|
||||
* Sets the source of the request as a SearchSourceBuilder. Note, setting anything other
|
||||
* than the search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(String)}.
|
||||
* {@link #setExtraSource(SearchSourceBuilder)} instead.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(String source) {
|
||||
public SearchRequestBuilder setSource(SearchSourceBuilder source) {
|
||||
request.source(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Allows to set other parameters.
|
||||
*/
|
||||
public SearchRequestBuilder setExtraSource(String source) {
|
||||
request.extraSource(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Note, setting anything other
|
||||
* than the search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(BytesReference)}.
|
||||
* {@link #setExtraSource(SearchSourceBuilder)} instead.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(BytesReference source) {
|
||||
request.source(source);
|
||||
@ -834,78 +826,11 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Note, setting anything other
|
||||
* than the search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(byte[])}.
|
||||
* Sets an additional source of the request as a SearchSourceBuilder. All values and
|
||||
* settings set on the extra source will override the corresponding settings on the specified
|
||||
* source.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(byte[] source) {
|
||||
request.source(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Allows to set other parameters.
|
||||
*/
|
||||
public SearchRequestBuilder setExtraSource(BytesReference source) {
|
||||
request.extraSource(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Allows to set other parameters.
|
||||
*/
|
||||
public SearchRequestBuilder setExtraSource(byte[] source) {
|
||||
request.extraSource(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Note, setting anything other
|
||||
* than the search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(byte[])}.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(byte[] source, int offset, int length) {
|
||||
request.source(source, offset, length);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Allows to set other parameters.
|
||||
*/
|
||||
public SearchRequestBuilder setExtraSource(byte[] source, int offset, int length) {
|
||||
request.extraSource(source, offset, length);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Note, setting anything other
|
||||
* than the search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(byte[])}.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(XContentBuilder builder) {
|
||||
request.source(builder);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a json string. Allows to set other parameters.
|
||||
*/
|
||||
public SearchRequestBuilder setExtraSource(XContentBuilder builder) {
|
||||
request.extraSource(builder);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source of the request as a map. Note, setting anything other than the
|
||||
* search type will cause this source to be overridden, consider using
|
||||
* {@link #setExtraSource(java.util.Map)}.
|
||||
*/
|
||||
public SearchRequestBuilder setSource(Map source) {
|
||||
request.source(source);
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchRequestBuilder setExtraSource(Map source) {
|
||||
public SearchRequestBuilder setExtraSource(SearchSourceBuilder source) {
|
||||
request.extraSource(source);
|
||||
return this;
|
||||
}
|
||||
@ -913,39 +838,11 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
||||
/**
|
||||
* template stuff
|
||||
*/
|
||||
|
||||
public SearchRequestBuilder setTemplate(Template template) {
|
||||
request.template(template);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #setTemplate(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder setTemplateName(String templateName) {
|
||||
request.templateName(templateName);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #setTemplate(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder setTemplateType(ScriptService.ScriptType templateType) {
|
||||
request.templateType(templateType);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #setTemplate(Template)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public SearchRequestBuilder setTemplateParams(Map<String, Object> templateParams) {
|
||||
request.templateParams(templateParams);
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchRequestBuilder setTemplateSource(String source) {
|
||||
request.templateSource(source);
|
||||
return this;
|
||||
|
@ -30,11 +30,11 @@ import java.io.IOException;
|
||||
/**
|
||||
* Internal suggest request executed directly against a specific index shard.
|
||||
*/
|
||||
final class ShardSuggestRequest extends BroadcastShardRequest {
|
||||
public final class ShardSuggestRequest extends BroadcastShardRequest {
|
||||
|
||||
private BytesReference suggestSource;
|
||||
|
||||
ShardSuggestRequest() {
|
||||
public ShardSuggestRequest() {
|
||||
}
|
||||
|
||||
ShardSuggestRequest(ShardId shardId, SuggestRequest request) {
|
||||
|
@ -58,7 +58,7 @@ public final class SuggestRequest extends BroadcastRequest<SuggestRequest> {
|
||||
|
||||
private BytesReference suggestSource;
|
||||
|
||||
SuggestRequest() {
|
||||
public SuggestRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -154,6 +154,16 @@ public class IndicesOptions {
|
||||
defaultSettings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the name represents a valid name for one of the indices options,
|
||||
* false otherwise
|
||||
*/
|
||||
public static boolean isIndicesOptions(String name) {
|
||||
return "expand_wildcards".equals(name) || "expandWildcards".equals(name) ||
|
||||
"ignore_unavailable".equals(name) || "ignoreUnavailable".equals(name) ||
|
||||
"allow_no_indices".equals(name) || "allowNoIndices".equals(name);
|
||||
}
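For context, a sketch of how a body parser can combine this helper with fromMap, tolerating indices-options keys while still rejecting unknown ones (the source map and handledKeys set are assumptions for illustration):

    for (Map.Entry<String, Object> entry : source.entrySet()) {
        String name = entry.getKey();
        if (handledKeys.contains(name)) {
            continue;  // a parameter this request parses itself
        }
        if (IndicesOptions.isIndicesOptions(name) == false) {
            throw new IllegalArgumentException("Unknown parameter " + name);
        }
        // indices-options keys are collected later via IndicesOptions.fromMap(...)
    }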
|
||||
|
||||
public static IndicesOptions fromParameters(Object wildcardsString, Object ignoreUnavailableString, Object allowNoIndicesString, IndicesOptions defaultSettings) {
|
||||
if (wildcardsString == null && ignoreUnavailableString == null && allowNoIndicesString == null) {
|
||||
return defaultSettings;
|
||||
|
@ -38,7 +38,7 @@ public abstract class BroadcastShardRequest extends TransportRequest implements
|
||||
|
||||
protected OriginalIndices originalIndices;
|
||||
|
||||
protected BroadcastShardRequest() {
|
||||
public BroadcastShardRequest() {
|
||||
}
|
||||
|
||||
protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) {
|
||||
|
@ -19,16 +19,8 @@
|
||||
|
||||
package org.elasticsearch.action.support.broadcast.node;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.NoShardAvailableActionException;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.HandledTransportAction;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.TransportActions;
|
||||
import org.elasticsearch.action.*;
|
||||
import org.elasticsearch.action.support.*;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
|
||||
@ -45,21 +37,13 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.BaseTransportResponseHandler;
|
||||
import org.elasticsearch.transport.NodeShouldNotConnectException;
|
||||
import org.elasticsearch.transport.TransportChannel;
|
||||
import org.elasticsearch.transport.TransportException;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
import org.elasticsearch.transport.TransportRequestHandler;
|
||||
import org.elasticsearch.transport.TransportResponse;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.transport.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
@ -100,15 +84,10 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
|
||||
|
||||
transportNodeBroadcastAction = actionName + "[n]";
|
||||
|
||||
transportService.registerRequestHandler(transportNodeBroadcastAction, new Callable<NodeRequest>() {
|
||||
@Override
|
||||
public NodeRequest call() throws Exception {
|
||||
return new NodeRequest();
|
||||
}
|
||||
}, executor, new BroadcastByNodeTransportRequestHandler());
|
||||
transportService.registerRequestHandler(transportNodeBroadcastAction, NodeRequest::new, executor, new BroadcastByNodeTransportRequestHandler());
|
||||
}
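A note on the pattern above: NodeRequest::new can replace the anonymous Callable because a no-arg constructor reference satisfies any single-abstract-method factory type, which is presumably why so many request constructors in this change go from package-private to public: the reference must resolve to a constructor visible at the registration site. A minimal standalone sketch of the idiom (class names illustrative only):

    import java.util.concurrent.Callable;

    class FactoryDemo {
        static class NodeRequest { }

        public static void main(String[] args) throws Exception {
            // the constructor reference implements Callable<NodeRequest>.call()
            Callable<NodeRequest> factory = NodeRequest::new;
            System.out.println(factory.call());
        }
    }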
|
||||
|
||||
private final Response newResponse(
|
||||
private Response newResponse(
|
||||
Request request,
|
||||
AtomicReferenceArray responses,
|
||||
List<NoShardAvailableActionException> unavailableShardExceptions,
|
||||
@ -253,7 +232,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
|
||||
if (shard.assignedToNode()) {
|
||||
String nodeId = shard.currentNodeId();
|
||||
if (!nodeIds.containsKey(nodeId)) {
|
||||
nodeIds.put(nodeId, new ArrayList<ShardRouting>());
|
||||
nodeIds.put(nodeId, new ArrayList<>());
|
||||
}
|
||||
nodeIds.get(nodeId).add(shard);
|
||||
} else {
|
||||
@ -405,14 +384,14 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
|
||||
}
|
||||
}
|
||||
|
||||
protected class NodeRequest extends TransportRequest implements IndicesRequest {
|
||||
public class NodeRequest extends TransportRequest implements IndicesRequest {
|
||||
private String nodeId;
|
||||
|
||||
private List<ShardRouting> shards;
|
||||
|
||||
protected Request indicesLevelRequest;
|
||||
|
||||
protected NodeRequest() {
|
||||
public NodeRequest() {
|
||||
}
|
||||
|
||||
public NodeRequest(String nodeId, Request request, List<ShardRouting> shards) {
|
||||
|
@ -32,7 +32,7 @@ public abstract class BaseNodeRequest extends TransportRequest {
|
||||
|
||||
private String nodeId;
|
||||
|
||||
protected BaseNodeRequest() {
|
||||
public BaseNodeRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -49,7 +49,7 @@ public abstract class SingleShardRequest<T extends SingleShardRequest> extends A
|
||||
ShardId internalShardId;
|
||||
private boolean threadedOperation = true;
|
||||
|
||||
protected SingleShardRequest() {
|
||||
public SingleShardRequest() {
|
||||
}
|
||||
|
||||
protected SingleShardRequest(String index) {
|
||||
|
@ -37,7 +37,7 @@ public class MultiTermVectorsShardRequest extends SingleShardRequest<MultiTermVe
|
||||
IntArrayList locations;
|
||||
List<TermVectorsRequest> requests;
|
||||
|
||||
MultiTermVectorsShardRequest() {
|
||||
public MultiTermVectorsShardRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -44,7 +44,7 @@ public class DfsOnlyRequest extends BroadcastRequest<DfsOnlyRequest> {
|
||||
|
||||
long nowInMillis;
|
||||
|
||||
DfsOnlyRequest() {
|
||||
public DfsOnlyRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -29,11 +29,11 @@ import org.elasticsearch.search.internal.ShardSearchTransportRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
class ShardDfsOnlyRequest extends BroadcastShardRequest {
|
||||
public class ShardDfsOnlyRequest extends BroadcastShardRequest {
|
||||
|
||||
private ShardSearchTransportRequest shardSearchRequest = new ShardSearchTransportRequest();
|
||||
|
||||
ShardDfsOnlyRequest() {
|
||||
public ShardDfsOnlyRequest() {
|
||||
|
||||
}
|
||||
|
||||
|
@ -61,7 +61,35 @@ final class ESPolicy extends Policy {
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for broken AWS code which destroys all SSL security
|
||||
// REMOVE THIS when https://github.com/aws/aws-sdk-java/pull/432 is fixed
|
||||
if (permission instanceof RuntimePermission && "accessClassInPackage.sun.security.ssl".equals(permission.getName())) {
|
||||
for (StackTraceElement element : Thread.currentThread().getStackTrace()) {
|
||||
if ("com.amazonaws.http.conn.ssl.SdkTLSSocketFactory".equals(element.getClassName()) &&
|
||||
"verifyMasterSecret".equals(element.getMethodName())) {
|
||||
// we found the horrible method: the hack begins!
|
||||
// force the aws code to back down, by throwing an exception that it catches.
|
||||
rethrow(new IllegalAccessException("no amazon, you cannot do this."));
|
||||
}
|
||||
}
|
||||
}
|
||||
// otherwise defer to template + dynamic file permissions
|
||||
return template.implies(domain, permission) || dynamic.implies(permission);
|
||||
}
|
||||
|
||||
/**
|
||||
* Classy puzzler to rethrow any checked exception as an unchecked one.
|
||||
*/
|
||||
private static class Rethrower<T extends Throwable> {
|
||||
private void rethrow(Throwable t) throws T {
|
||||
throw (T) t;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rethrows <code>t</code> (identical object).
|
||||
*/
|
||||
private void rethrow(Throwable t) {
|
||||
new Rethrower<Error>().rethrow(t);
|
||||
}
|
||||
}
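Why the Rethrower trick works: the bound T extends Throwable is erased at compile time, and the call site pins T to Error, so the compiler accepts the call without a throws clause while the original checked exception propagates unchanged at runtime. A self-contained sketch of the same idiom (class and method names are illustrative only):

    public class SneakyThrowDemo {
        @SuppressWarnings("unchecked")
        private static <T extends Throwable> void sneakyThrow(Throwable t) throws T {
            throw (T) t;  // the cast is erased; the original throwable escapes as-is
        }

        public static void main(String[] args) {
            // compiles without declaring IOException, yet throws it at runtime
            sneakyThrow(new java.io.IOException("checked, but undeclared"));
        }
    }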
|
||||
|
@ -26,6 +26,7 @@ import org.elasticsearch.common.logging.ESLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.net.URLClassLoader;
|
||||
import java.nio.file.FileVisitResult;
|
||||
@ -70,21 +71,43 @@ public class JarHell {
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the current classloader for duplicate classes
|
||||
* Checks the current classpath for duplicate classes
|
||||
* @throws IllegalStateException if jar hell was found
|
||||
*/
|
||||
public static void checkJarHell() throws Exception {
|
||||
ClassLoader loader = JarHell.class.getClassLoader();
|
||||
if (loader instanceof URLClassLoader == false) {
|
||||
return;
|
||||
}
|
||||
ESLogger logger = Loggers.getLogger(JarHell.class);
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("java.class.path: {}", System.getProperty("java.class.path"));
|
||||
logger.debug("sun.boot.class.path: {}", System.getProperty("sun.boot.class.path"));
|
||||
logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs()));
|
||||
if (loader instanceof URLClassLoader ) {
|
||||
logger.debug("classloader urls: {}", Arrays.toString(((URLClassLoader)loader).getURLs()));
|
||||
}
|
||||
}
|
||||
checkJarHell(((URLClassLoader) loader).getURLs());
|
||||
checkJarHell(parseClassPath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the classpath into a set of URLs
|
||||
*/
|
||||
@SuppressForbidden(reason = "resolves against CWD because that is how classpaths work")
|
||||
public static URL[] parseClassPath() {
|
||||
String elements[] = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
|
||||
URL urlElements[] = new URL[elements.length];
|
||||
for (int i = 0; i < elements.length; i++) {
|
||||
String element = elements[i];
|
||||
// empty classpath element behaves like CWD.
|
||||
if (element.isEmpty()) {
|
||||
element = System.getProperty("user.dir");
|
||||
}
|
||||
try {
|
||||
urlElements[i] = PathUtils.get(element).toUri().toURL();
|
||||
} catch (MalformedURLException e) {
|
||||
// should not happen, as we use the filesystem API
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
return urlElements;
|
||||
}
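The empty-element rule above mirrors the JDK launcher, where an empty classpath entry means the current directory. A quick hedged demo (the jar names and the hard-coded ':' separator are illustrative; the real code uses path.separator):

    import java.nio.file.Paths;

    public class ClassPathDemo {
        public static void main(String[] args) throws Exception {
            String cp = "lib/a.jar::lib/b.jar";  // note the empty middle element
            for (String element : cp.split(":")) {
                if (element.isEmpty()) {
                    element = System.getProperty("user.dir");  // behaves like CWD
                }
                System.out.println(Paths.get(element).toUri().toURL());
            }
        }
    }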
|
||||
|
||||
/**
|
||||
|
@ -24,7 +24,6 @@ import org.elasticsearch.env.Environment;
|
||||
|
||||
import java.io.*;
|
||||
import java.net.URL;
|
||||
import java.net.URLClassLoader;
|
||||
import java.nio.file.AccessMode;
|
||||
import java.nio.file.FileAlreadyExistsException;
|
||||
import java.nio.file.Files;
|
||||
@ -121,8 +120,8 @@ final class Security {
|
||||
private static final Map<Pattern,String> SPECIAL_JARS;
|
||||
static {
|
||||
Map<Pattern,String> m = new IdentityHashMap<>();
|
||||
m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
|
||||
m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
|
||||
m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
|
||||
m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
|
||||
SPECIAL_JARS = Collections.unmodifiableMap(m);
|
||||
}
|
||||
|
||||
@ -133,27 +132,21 @@ final class Security {
|
||||
*/
|
||||
@SuppressForbidden(reason = "proper use of URL")
|
||||
static void setCodebaseProperties() {
|
||||
ClassLoader loader = Security.class.getClassLoader();
|
||||
if (loader instanceof URLClassLoader) {
|
||||
for (URL url : ((URLClassLoader)loader).getURLs()) {
|
||||
for (Map.Entry<Pattern,String> e : SPECIAL_JARS.entrySet()) {
|
||||
if (e.getKey().matcher(url.getPath()).matches()) {
|
||||
String prop = e.getValue();
|
||||
if (System.getProperty(prop) != null) {
|
||||
throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
|
||||
}
|
||||
System.setProperty(prop, url.toString());
|
||||
for (URL url : JarHell.parseClassPath()) {
|
||||
for (Map.Entry<Pattern,String> e : SPECIAL_JARS.entrySet()) {
|
||||
if (e.getKey().matcher(url.getPath()).matches()) {
|
||||
String prop = e.getValue();
|
||||
if (System.getProperty(prop) != null) {
|
||||
throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
|
||||
}
|
||||
System.setProperty(prop, url.toString());
|
||||
}
|
||||
}
|
||||
for (String prop : SPECIAL_JARS.values()) {
|
||||
if (System.getProperty(prop) == null) {
|
||||
System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all"
|
||||
}
|
||||
}
|
||||
for (String prop : SPECIAL_JARS.values()) {
|
||||
if (System.getProperty(prop) == null) {
|
||||
System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all"
|
||||
}
|
||||
} else {
|
||||
// we could try to parse the classpath or something, but screw it for now.
|
||||
throw new UnsupportedOperationException("Unsupported system classloader type: " + loader.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -48,7 +48,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.DisableAllocationDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
|
||||
@ -115,8 +114,7 @@ public class ClusterModule extends AbstractModule {
|
||||
RebalanceOnlyWhenActiveAllocationDecider.class,
|
||||
ClusterRebalanceAllocationDecider.class,
|
||||
ConcurrentRebalanceAllocationDecider.class,
|
||||
EnableAllocationDecider.class, // new enable allocation logic should proceed old disable allocation logic
|
||||
DisableAllocationDecider.class,
|
||||
EnableAllocationDecider.class,
|
||||
AwarenessAllocationDecider.class,
|
||||
ShardsLimitAllocationDecider.class,
|
||||
NodeVersionAllocationDecider.class,
|
||||
@ -156,9 +154,6 @@ public class ClusterModule extends AbstractModule {
|
||||
registerClusterDynamicSetting(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, Validator.INTEGER);
|
||||
registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(DisableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(ZenDiscovery.SETTING_REJOIN_ON_MASTER_GONE, Validator.BOOLEAN);
|
||||
registerClusterDynamicSetting(DiscoverySettings.NO_MASTER_BLOCK, Validator.EMPTY);
|
||||
registerClusterDynamicSetting(FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP + "*", Validator.EMPTY);
|
||||
@ -222,9 +217,6 @@ public class ClusterModule extends AbstractModule {
|
||||
registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY);
|
||||
registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_ALLOCATION, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(DisableAllocationDecider.INDEX_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(TranslogConfig.INDEX_TRANSLOG_FS_TYPE, Validator.EMPTY);
|
||||
registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER);
|
||||
registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY);
|
||||
|
@ -133,12 +133,12 @@ public class NodeIndexDeletedAction extends AbstractComponent {
|
||||
}
|
||||
}
|
||||
|
||||
static class NodeIndexDeletedMessage extends TransportRequest {
|
||||
public static class NodeIndexDeletedMessage extends TransportRequest {
|
||||
|
||||
String index;
|
||||
String nodeId;
|
||||
|
||||
NodeIndexDeletedMessage() {
|
||||
public NodeIndexDeletedMessage() {
|
||||
}
|
||||
|
||||
NodeIndexDeletedMessage(String index, String nodeId) {
|
||||
@ -161,12 +161,12 @@ public class NodeIndexDeletedAction extends AbstractComponent {
|
||||
}
|
||||
}
|
||||
|
||||
static class NodeIndexStoreDeletedMessage extends TransportRequest {
|
||||
public static class NodeIndexStoreDeletedMessage extends TransportRequest {
|
||||
|
||||
String index;
|
||||
String nodeId;
|
||||
|
||||
NodeIndexStoreDeletedMessage() {
|
||||
public NodeIndexStoreDeletedMessage() {
|
||||
}
|
||||
|
||||
NodeIndexStoreDeletedMessage(String index, String nodeId) {
|
||||
|
@ -79,7 +79,7 @@ public class NodeMappingRefreshAction extends AbstractComponent {
|
||||
private String[] types;
|
||||
private String nodeId;
|
||||
|
||||
NodeMappingRefreshRequest() {
|
||||
public NodeMappingRefreshRequest() {
|
||||
}
|
||||
|
||||
public NodeMappingRefreshRequest(String index, String indexUUID, String[] types, String nodeId) {
|
||||
|
@ -244,7 +244,7 @@ public class ShardStateAction extends AbstractComponent {
|
||||
}
|
||||
}
|
||||
|
||||
static class ShardRoutingEntry extends TransportRequest {
|
||||
public static class ShardRoutingEntry extends TransportRequest {
|
||||
|
||||
ShardRouting shardRouting;
|
||||
String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE;
|
||||
@ -253,7 +253,7 @@ public class ShardStateAction extends AbstractComponent {
|
||||
|
||||
volatile boolean processed; // state field, no need to serialize
|
||||
|
||||
ShardRoutingEntry() {
|
||||
public ShardRoutingEntry() {
|
||||
}
|
||||
|
||||
ShardRoutingEntry(ShardRouting shardRouting, String indexUUID, String message, @Nullable Throwable failure) {
|
||||
|
@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.Diff;
|
||||
@ -33,7 +32,6 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
|
||||
import org.elasticsearch.cluster.routing.HashFunction;
|
||||
import org.elasticsearch.cluster.routing.Murmur3HashFunction;
|
||||
import org.elasticsearch.common.Classes;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
@ -218,19 +216,19 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
|
||||
this.totalNumberOfShards = numberOfShards() * (numberOfReplicas() + 1);
|
||||
this.aliases = aliases;
|
||||
|
||||
ImmutableMap<String, String> requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap();
|
||||
Map<String, String> requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap();
|
||||
if (requireMap.isEmpty()) {
|
||||
requireFilters = null;
|
||||
} else {
|
||||
requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);
|
||||
}
|
||||
ImmutableMap<String, String> includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap();
|
||||
Map<String, String> includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap();
|
||||
if (includeMap.isEmpty()) {
|
||||
includeFilters = null;
|
||||
} else {
|
||||
includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);
|
||||
}
|
||||
ImmutableMap<String, String> excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap();
|
||||
Map<String, String> excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap();
|
||||
if (excludeMap.isEmpty()) {
|
||||
excludeFilters = null;
|
||||
} else {
|
||||
|
@ -184,11 +184,9 @@ public class AwarenessAllocationDecider extends AllocationDecider {
|
||||
// build the count of shards per attribute value
|
||||
ObjectIntHashMap<String> shardPerAttribute = new ObjectIntHashMap<>();
|
||||
for (ShardRouting assignedShard : allocation.routingNodes().assignedShards(shardRouting)) {
|
||||
// if the shard is relocating, then make sure we count it as part of the node it is relocating to
|
||||
if (assignedShard.relocating()) {
|
||||
RoutingNode relocationNode = allocation.routingNodes().node(assignedShard.relocatingNodeId());
|
||||
shardPerAttribute.addTo(relocationNode.node().attributes().get(awarenessAttribute), 1);
|
||||
} else if (assignedShard.started() || assignedShard.initializing()) {
|
||||
if (assignedShard.started() || assignedShard.initializing()) {
|
||||
// Note: this also counts relocation targets as that will be the new location of the shard.
|
||||
// Relocation sources should not be counted as the shard is moving away
|
||||
RoutingNode routingNode = allocation.routingNodes().node(assignedShard.currentNodeId());
|
||||
shardPerAttribute.addTo(routingNode.node().attributes().get(awarenessAttribute), 1);
|
||||
}
|
||||
|
@ -1,133 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.cluster.routing.allocation.decider;
|
||||
|
||||
import org.elasticsearch.cluster.routing.RoutingNode;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
|
||||
/**
|
||||
* This {@link AllocationDecider} prevents cluster-wide shard allocations. The
|
||||
* behavior of this {@link AllocationDecider} can be changed in real-time via
|
||||
* the cluster settings API. It respects the following settings:
|
||||
* <ul>
|
||||
* <li><tt>cluster.routing.allocation.disable_new_allocation</tt> - if set to
|
||||
* <code>true</code> no new shard allocations are allowed. Note: this setting is
|
||||
* only applied if the allocated shard is a primary and it has not been
|
||||
* allocated before this setting was applied.</li>
|
||||
* <p/>
|
||||
* <li><tt>cluster.routing.allocation.disable_allocation</tt> - if set to
|
||||
* <code>true</code> cluster wide allocations are disabled</li>
|
||||
* <p/>
|
||||
* <li><tt>cluster.routing.allocation.disable_replica_allocation</tt> - if set
|
||||
* to <code>true</code> cluster wide replica allocations are disabled while
|
||||
* primary shards can still be allocated</li>
|
||||
* </ul>
|
||||
* <p/>
|
||||
* <p>
|
||||
* Note: all of the above settings might be ignored if the allocation happens on
|
||||
* a shard that explicitly ignores disabled allocations via
|
||||
* {@link RoutingAllocation#ignoreDisable()}, which is set if allocations are
|
||||
* explicit.
|
||||
* </p>
|
||||
*
|
||||
* @deprecated In favour of {@link EnableAllocationDecider}.
|
||||
*/
|
||||
@Deprecated
|
||||
public class DisableAllocationDecider extends AllocationDecider {
|
||||
|
||||
public static final String NAME = "disable";
|
||||
|
||||
public static final String CLUSTER_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION = "cluster.routing.allocation.disable_new_allocation";
|
||||
public static final String CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION = "cluster.routing.allocation.disable_allocation";
|
||||
public static final String CLUSTER_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION = "cluster.routing.allocation.disable_replica_allocation";
|
||||
|
||||
public static final String INDEX_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION = "index.routing.allocation.disable_new_allocation";
|
||||
public static final String INDEX_ROUTING_ALLOCATION_DISABLE_ALLOCATION = "index.routing.allocation.disable_allocation";
|
||||
public static final String INDEX_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION = "index.routing.allocation.disable_replica_allocation";
|
||||
|
||||
class ApplySettings implements NodeSettingsService.Listener {
|
||||
@Override
|
||||
public void onRefreshSettings(Settings settings) {
|
||||
boolean disableNewAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION, DisableAllocationDecider.this.disableNewAllocation);
|
||||
if (disableNewAllocation != DisableAllocationDecider.this.disableNewAllocation) {
|
||||
logger.info("updating [cluster.routing.allocation.disable_new_allocation] from [{}] to [{}]", DisableAllocationDecider.this.disableNewAllocation, disableNewAllocation);
|
||||
DisableAllocationDecider.this.disableNewAllocation = disableNewAllocation;
|
||||
}
|
||||
|
||||
boolean disableAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION, DisableAllocationDecider.this.disableAllocation);
|
||||
if (disableAllocation != DisableAllocationDecider.this.disableAllocation) {
|
||||
logger.info("updating [cluster.routing.allocation.disable_allocation] from [{}] to [{}]", DisableAllocationDecider.this.disableAllocation, disableAllocation);
|
||||
DisableAllocationDecider.this.disableAllocation = disableAllocation;
|
||||
}
|
||||
|
||||
boolean disableReplicaAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION, DisableAllocationDecider.this.disableReplicaAllocation);
|
||||
if (disableReplicaAllocation != DisableAllocationDecider.this.disableReplicaAllocation) {
|
||||
logger.info("updating [cluster.routing.allocation.disable_replica_allocation] from [{}] to [{}]", DisableAllocationDecider.this.disableReplicaAllocation, disableReplicaAllocation);
|
||||
DisableAllocationDecider.this.disableReplicaAllocation = disableReplicaAllocation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private volatile boolean disableNewAllocation;
|
||||
private volatile boolean disableAllocation;
|
||||
private volatile boolean disableReplicaAllocation;
|
||||
|
||||
@Inject
|
||||
public DisableAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) {
|
||||
super(settings);
|
||||
this.disableNewAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION, false);
|
||||
this.disableAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_ALLOCATION, false);
|
||||
this.disableReplicaAllocation = settings.getAsBoolean(CLUSTER_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION, false);
|
||||
|
||||
nodeSettingsService.addListener(new ApplySettings());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
|
||||
if (allocation.ignoreDisable()) {
|
||||
return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored");
|
||||
}
|
||||
Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).settings();
|
||||
if (shardRouting.primary() && shardRouting.allocatedPostIndexCreate() == false) {
|
||||
// if it's primary, and it hasn't been allocated post API (meaning it's a "fresh newly created shard"), only disable allocation
|
||||
// on a special disable allocation flag
|
||||
if (indexSettings.getAsBoolean(INDEX_ROUTING_ALLOCATION_DISABLE_NEW_ALLOCATION, disableNewAllocation)) {
|
||||
return allocation.decision(Decision.NO, NAME, "new primary allocation is disabled");
|
||||
} else {
|
||||
return allocation.decision(Decision.YES, NAME, "new primary allocation is enabled");
|
||||
}
|
||||
}
|
||||
if (indexSettings.getAsBoolean(INDEX_ROUTING_ALLOCATION_DISABLE_ALLOCATION, disableAllocation)) {
|
||||
return allocation.decision(Decision.NO, NAME, "all allocation is disabled");
|
||||
}
|
||||
if (indexSettings.getAsBoolean(INDEX_ROUTING_ALLOCATION_DISABLE_REPLICA_ALLOCATION, disableReplicaAllocation)) {
|
||||
if (shardRouting.primary()) {
|
||||
return allocation.decision(Decision.YES, NAME, "primary allocation is enabled");
|
||||
} else {
|
||||
return allocation.decision(Decision.NO, NAME, "replica allocation is disabled");
|
||||
}
|
||||
}
|
||||
return allocation.decision(Decision.YES, NAME, "all allocation is enabled");
|
||||
}
|
||||
}
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.cluster.routing.allocation.decider;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
|
||||
import org.elasticsearch.cluster.routing.RoutingNode;
|
||||
@ -29,6 +28,8 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.settings.NodeSettingsService;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND;
|
||||
import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR;
|
||||
|
||||
@ -77,19 +78,19 @@ public class FilterAllocationDecider extends AllocationDecider {
|
||||
@Inject
|
||||
public FilterAllocationDecider(Settings settings, NodeSettingsService nodeSettingsService) {
|
||||
super(settings);
|
||||
ImmutableMap<String, String> requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap();
|
||||
Map<String, String> requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap();
|
||||
if (requireMap.isEmpty()) {
|
||||
clusterRequireFilters = null;
|
||||
} else {
|
||||
clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);
|
||||
}
|
||||
ImmutableMap<String, String> includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap();
|
||||
Map<String, String> includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap();
|
||||
if (includeMap.isEmpty()) {
|
||||
clusterIncludeFilters = null;
|
||||
} else {
|
||||
clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);
|
||||
}
|
||||
ImmutableMap<String, String> excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap();
|
||||
Map<String, String> excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap();
|
||||
if (excludeMap.isEmpty()) {
|
||||
clusterExcludeFilters = null;
|
||||
} else {
|
||||
@ -148,15 +149,15 @@ public class FilterAllocationDecider extends AllocationDecider {
|
||||
class ApplySettings implements NodeSettingsService.Listener {
|
||||
@Override
|
||||
public void onRefreshSettings(Settings settings) {
|
||||
ImmutableMap<String, String> requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap();
|
||||
Map<String, String> requireMap = settings.getByPrefix(CLUSTER_ROUTING_REQUIRE_GROUP).getAsMap();
|
||||
if (!requireMap.isEmpty()) {
|
||||
clusterRequireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap);
|
||||
}
|
||||
ImmutableMap<String, String> includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap();
|
||||
Map<String, String> includeMap = settings.getByPrefix(CLUSTER_ROUTING_INCLUDE_GROUP).getAsMap();
|
||||
if (!includeMap.isEmpty()) {
|
||||
clusterIncludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap);
|
||||
}
|
||||
ImmutableMap<String, String> excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap();
|
||||
Map<String, String> excludeMap = settings.getByPrefix(CLUSTER_ROUTING_EXCLUDE_GROUP).getAsMap();
|
||||
if (!excludeMap.isEmpty()) {
|
||||
clusterExcludeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, excludeMap);
|
||||
}
|
||||
|
@ -19,11 +19,10 @@
|
||||
|
||||
package org.elasticsearch.common.blobstore;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
|
||||
@ -37,14 +36,19 @@ public interface BlobContainer {

    boolean blobExists(String blobName);

    /**
     * Creates a new {@link InputStream} for the given blob name
     * Creates a new InputStream for the given blob name
     */
    InputStream openInput(String blobName) throws IOException;
    InputStream readBlob(String blobName) throws IOException;

    /**
     * Creates a new OutputStream for the given blob name
     * Reads blob content from the input stream and writes it to the blob store
     */
    OutputStream createOutput(String blobName) throws IOException;
    void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException;

    /**
     * Writes bytes to the blob
     */
    void writeBlob(String blobName, BytesReference bytes) throws IOException;

    /**
     * Deletes a blob with the given name.
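From a caller's point of view, the interface change replaces raw stream handles with whole-blob operations. A minimal usage sketch, assuming a container obtained from some BlobStore (the wiring and names below are illustrative, not from this diff):

    // Hypothetical caller: write a blob from a local file, then read it back.
    BlobContainer container = blobStore.blobContainer(new BlobPath().add("indices").add("0"));
    try (InputStream in = Files.newInputStream(localFile)) {
        // the caller hands over the stream and its size; buffering and fsync are the container's job
        container.writeBlob("segments_1", in, Files.size(localFile));
    }
    try (InputStream in = container.readBlob("segments_1")) {
        // consume the stream ...
    }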
@ -25,7 +25,9 @@ import org.elasticsearch.common.blobstore.BlobMetaData;

import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.Streams;

import java.io.*;
import java.nio.file.DirectoryStream;
@ -83,25 +85,28 @@ public class FsBlobContainer extends AbstractBlobContainer {

    }

    @Override
    public InputStream openInput(String name) throws IOException {
    public InputStream readBlob(String name) throws IOException {
        return new BufferedInputStream(Files.newInputStream(path.resolve(name)), blobStore.bufferSizeInBytes());
    }

    @Override
    public OutputStream createOutput(String blobName) throws IOException {
    public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
        final Path file = path.resolve(blobName);
        return new BufferedOutputStream(new FilterOutputStream(Files.newOutputStream(file)) {
        try (OutputStream outputStream = Files.newOutputStream(file)) {
            Streams.copy(inputStream, outputStream, new byte[blobStore.bufferSizeInBytes()]);
        }
        IOUtils.fsync(file, false);
        IOUtils.fsync(path, true);
    }

            @Override // FilterOutputStream#write(byte[] b, int off, int len) is trappy: it writes every single byte
            public void write(byte[] b, int off, int len) throws IOException { out.write(b, off, len); }

            @Override
            public void close() throws IOException {
                super.close();
                IOUtils.fsync(file, false);
                IOUtils.fsync(path, true);
            }
        }, blobStore.bufferSizeInBytes());
    @Override
    public void writeBlob(String blobName, BytesReference data) throws IOException {
        final Path file = path.resolve(blobName);
        try (OutputStream outputStream = Files.newOutputStream(file)) {
            data.writeTo(outputStream);
        }
        IOUtils.fsync(file, false);
        IOUtils.fsync(path, true);
    }

    @Override
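The new write path is the classic durable-write sequence: copy the stream to the file, fsync the file, then fsync the parent directory so the directory entry itself survives a crash (the true flag marks a directory). In isolation the pattern looks like this; a sketch using Lucene's IOUtils, not the exact ES code:

    // Sketch: durable single-file write.
    Path file = dir.resolve("blob");
    try (OutputStream out = Files.newOutputStream(file)) {
        out.write(data);              // 1. write the content
    }
    IOUtils.fsync(file, false);       // 2. force the file's bytes to stable storage
    IOUtils.fsync(dir, true);         // 3. force the directory entry too (true = directory)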
@ -19,7 +19,6 @@

package org.elasticsearch.common.blobstore.support;

import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
@ -0,0 +1,78 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.blobstore.support;

import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

/**
 * Temporary compatibility class.
 *
 * This class should be removed after the S3 and Azure containers migrate to the new model.
 */
@Deprecated
public abstract class AbstractLegacyBlobContainer extends AbstractBlobContainer {

    protected AbstractLegacyBlobContainer(BlobPath path) {
        super(path);
    }

    /**
     * Creates a new {@link InputStream} for the given blob name
     * <p/>
     * This method is deprecated and is used only for compatibility with older blob containers.
     * New blob containers should use the readBlob/writeBlob methods instead.
     */
    @Deprecated
    protected abstract InputStream openInput(String blobName) throws IOException;

    /**
     * Creates a new OutputStream for the given blob name
     * <p/>
     * This method is deprecated and is used only for compatibility with older blob containers.
     * New blob containers should override the readBlob/writeBlob methods instead.
     */
    @Deprecated
    protected abstract OutputStream createOutput(String blobName) throws IOException;

    @Override
    public InputStream readBlob(String blobName) throws IOException {
        return openInput(blobName);
    }

    @Override
    public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
        try (OutputStream stream = createOutput(blobName)) {
            Streams.copy(inputStream, stream);
        }
    }

    @Override
    public void writeBlob(String blobName, BytesReference data) throws IOException {
        try (OutputStream stream = createOutput(blobName)) {
            data.writeTo(stream);
        }
    }
}
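The class is a plain adapter: a repository that still thinks in streams implements the two deprecated hooks and inherits the new contract for free. A hypothetical legacy container might look like this (the s3Client calls are purely illustrative):

    // Hypothetical legacy container: only the old-style hooks are supplied;
    // readBlob/writeBlob come from AbstractLegacyBlobContainer.
    class LegacyS3BlobContainer extends AbstractLegacyBlobContainer {
        LegacyS3BlobContainer(BlobPath path) {
            super(path);
        }

        @Override
        protected InputStream openInput(String blobName) throws IOException {
            return s3Client.download(path(), blobName);   // illustrative call
        }

        @Override
        protected OutputStream createOutput(String blobName) throws IOException {
            return s3Client.upload(path(), blobName);     // illustrative call
        }

        // blobExists, deleteBlob, listBlobs, ... omitted for brevity
    }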
@ -23,11 +23,11 @@ import com.google.common.collect.ImmutableMap;

import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;

/**
@ -99,12 +99,17 @@ public class URLBlobContainer extends AbstractBlobContainer {

    }

    @Override
    public InputStream openInput(String name) throws IOException {
    public InputStream readBlob(String name) throws IOException {
        return new BufferedInputStream(new URL(path, name).openStream(), blobStore.bufferSizeInBytes());
    }

    @Override
    public OutputStream createOutput(String blobName) throws IOException {
    public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
        throw new UnsupportedOperationException("URL repository doesn't support this operation");
    }

    @Override
    public void writeBlob(String blobName, BytesReference data) throws IOException {
        throw new UnsupportedOperationException("URL repository doesn't support this operation");
    }
}
@ -1,481 +0,0 @@

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.elasticsearch.common.geo;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Utilities for encoding and decoding geohashes. Based on
 * http://en.wikipedia.org/wiki/Geohash.
 */
// LUCENE MONITOR: monitor against spatial package
// replaced with native DECODE_MAP
public class GeoHashUtils {

    private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
            '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
            'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

    public static final int PRECISION = 12;
    private static final int[] BITS = {16, 8, 4, 2, 1};

    private GeoHashUtils() {
    }

    public static String encode(double latitude, double longitude) {
        return encode(latitude, longitude, PRECISION);
    }

    /**
     * Encodes the given latitude and longitude into a geohash
     *
     * @param latitude  Latitude to encode
     * @param longitude Longitude to encode
     * @return Geohash encoding of the longitude and latitude
     */
    public static String encode(double latitude, double longitude, int precision) {
        // double[] latInterval = {-90.0, 90.0};
        // double[] lngInterval = {-180.0, 180.0};
        double latInterval0 = -90.0;
        double latInterval1 = 90.0;
        double lngInterval0 = -180.0;
        double lngInterval1 = 180.0;

        final StringBuilder geohash = new StringBuilder();
        boolean isEven = true;

        int bit = 0;
        int ch = 0;

        while (geohash.length() < precision) {
            double mid = 0.0;
            if (isEven) {
                // mid = (lngInterval[0] + lngInterval[1]) / 2D;
                mid = (lngInterval0 + lngInterval1) / 2D;
                if (longitude > mid) {
                    ch |= BITS[bit];
                    // lngInterval[0] = mid;
                    lngInterval0 = mid;
                } else {
                    // lngInterval[1] = mid;
                    lngInterval1 = mid;
                }
            } else {
                // mid = (latInterval[0] + latInterval[1]) / 2D;
                mid = (latInterval0 + latInterval1) / 2D;
                if (latitude > mid) {
                    ch |= BITS[bit];
                    // latInterval[0] = mid;
                    latInterval0 = mid;
                } else {
                    // latInterval[1] = mid;
                    latInterval1 = mid;
                }
            }

            isEven = !isEven;

            if (bit < 4) {
                bit++;
            } else {
                geohash.append(BASE_32[ch]);
                bit = 0;
                ch = 0;
            }
        }

        return geohash.toString();
    }
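A worked example of the loop above, using the classic coordinates from the Wikipedia article the class cites: even-numbered bits bisect the longitude range, odd-numbered bits the latitude range, and each group of five bits becomes one BASE_32 character.

    // Sketch: encoding the well-known Wikipedia example point.
    String hash = GeoHashUtils.encode(57.64911, 10.40744, 11);
    // -> "u4pruydqqvj": 55 bits of interleaved lon/lat bisections, 5 bits per character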
    private static final char encode(int x, int y) {
        return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
    }

    /**
     * Calculate all neighbors of a given geohash cell.
     *
     * @param geohash Geohash of the defined cell
     * @return geohashes of all neighbor cells
     */
    public static Collection<? extends CharSequence> neighbors(String geohash) {
        return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
    }

    /**
     * Calculate the geohash of a neighbor of a geohash
     *
     * @param geohash the geohash of a cell
     * @param level   level of the geohash
     * @param dx      delta of the first grid coordinate (must be -1, 0 or +1)
     * @param dy      delta of the second grid coordinate (must be -1, 0 or +1)
     * @return geohash of the defined cell
     */
    private final static String neighbor(String geohash, int level, int dx, int dy) {
        int cell = decode(geohash.charAt(level - 1));

        // Decoding the Geohash bit pattern to determine grid coordinates
        int x0 = cell & 1;  // first bit of x
        int y0 = cell & 2;  // first bit of y
        int x1 = cell & 4;  // second bit of x
        int y1 = cell & 8;  // second bit of y
        int x2 = cell & 16; // third bit of x

        // combine the bit pattern to grid coordinates.
        // note that the semantics of x and y are swapping
        // on each level
        int x = x0 + (x1 / 2) + (x2 / 4);
        int y = (y0 / 2) + (y1 / 4);

        if (level == 1) {
            // Root cells at north (namely "bcfguvyz") or at
            // south (namely "0145hjnp") do not have neighbors
            // in north/south direction
            if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
                return null;
            } else {
                return Character.toString(encode(x + dx, y + dy));
            }
        } else {
            // define grid coordinates for next level
            final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
            final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);

            // if the defined neighbor has the same parent as the current cell,
            // encode the cell directly. Otherwise find the cell next to this
            // cell recursively. Since encoding wraps around within a cell
            // it can be encoded here.
            // xLimit and yLimit must always be 7 and 3 respectively,
            // since x and y semantics are swapping on each level.
            if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
                return geohash.substring(0, level - 1) + encode(nx, ny);
            } else {
                String neighbor = neighbor(geohash, level - 1, dx, dy);
                if (neighbor != null) {
                    return neighbor + encode(nx, ny);
                } else {
                    return null;
                }
            }
        }
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash   Geohash of a specified cell
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
        return addNeighbors(geohash, geohash.length(), neighbors);
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash   Geohash of a specified cell
     * @param length    level of the given geohash
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
        String south = neighbor(geohash, length, 0, -1);
        String north = neighbor(geohash, length, 0, +1);
        if (north != null) {
            neighbors.add(neighbor(north, length, -1, 0));
            neighbors.add(north);
            neighbors.add(neighbor(north, length, +1, 0));
        }

        neighbors.add(neighbor(geohash, length, -1, 0));
        neighbors.add(neighbor(geohash, length, +1, 0));

        if (south != null) {
            neighbors.add(neighbor(south, length, -1, 0));
            neighbors.add(south);
            neighbors.add(neighbor(south, length, +1, 0));
        }

        return neighbors;
    }

    private static final int decode(char geo) {
        switch (geo) {
            case '0':
                return 0;
            case '1':
                return 1;
            case '2':
                return 2;
            case '3':
                return 3;
            case '4':
                return 4;
            case '5':
                return 5;
            case '6':
                return 6;
            case '7':
                return 7;
            case '8':
                return 8;
            case '9':
                return 9;
            case 'b':
                return 10;
            case 'c':
                return 11;
            case 'd':
                return 12;
            case 'e':
                return 13;
            case 'f':
                return 14;
            case 'g':
                return 15;
            case 'h':
                return 16;
            case 'j':
                return 17;
            case 'k':
                return 18;
            case 'm':
                return 19;
            case 'n':
                return 20;
            case 'p':
                return 21;
            case 'q':
                return 22;
            case 'r':
                return 23;
            case 's':
                return 24;
            case 't':
                return 25;
            case 'u':
                return 26;
            case 'v':
                return 27;
            case 'w':
                return 28;
            case 'x':
                return 29;
            case 'y':
                return 30;
            case 'z':
                return 31;
            default:
                throw new IllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
        }
    }

    /**
     * Decodes the given geohash
     *
     * @param geohash Geohash to decode
     * @return {@link GeoPoint} at the center of the cell given by the geohash
     */
    public static GeoPoint decode(String geohash) {
        return decode(geohash, new GeoPoint());
    }

    /**
     * Decodes the given geohash into a latitude and longitude
     *
     * @param geohash Geohash to decode
     * @return the given {@link GeoPoint} reset to the center of
     *         the cell given by the geohash
     */
    public static GeoPoint decode(String geohash, GeoPoint ret) {
        double[] interval = decodeCell(geohash);
        return ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
    }

    private static double[] decodeCell(String geohash) {
        double[] interval = {-90.0, 90.0, -180.0, 180.0};
        boolean isEven = true;

        for (int i = 0; i < geohash.length(); i++) {
            final int cd = decode(geohash.charAt(i));

            for (int mask : BITS) {
                if (isEven) {
                    if ((cd & mask) != 0) {
                        interval[2] = (interval[2] + interval[3]) / 2D;
                    } else {
                        interval[3] = (interval[2] + interval[3]) / 2D;
                    }
                } else {
                    if ((cd & mask) != 0) {
                        interval[0] = (interval[0] + interval[1]) / 2D;
                    } else {
                        interval[1] = (interval[0] + interval[1]) / 2D;
                    }
                }
                isEven = !isEven;
            }
        }
        return interval;
    }

    //========== long-based encodings for geohashes ========================================

    /**
     * Encodes latitude and longitude information into a single long with variable precision.
     * Up to 12 levels of precision are supported, which should offer sub-metre resolution.
     *
     * @param latitude
     * @param longitude
     * @param precision The required precision between 1 and 12
     * @return A single long where 4 bits are used for holding the precision and the remaining
     *         60 bits are reserved for 5-bit cell identifiers, giving up to 12 layers.
     */
    public static long encodeAsLong(double latitude, double longitude, int precision) {
        if ((precision > 12) || (precision < 1)) {
            throw new IllegalArgumentException("Illegal precision length of " + precision +
                    ". Long-based geohashes only support precisions between 1 and 12");
        }
        double latInterval0 = -90.0;
        double latInterval1 = 90.0;
        double lngInterval0 = -180.0;
        double lngInterval1 = 180.0;

        long geohash = 0L;
        boolean isEven = true;

        int bit = 0;
        int ch = 0;

        int geohashLength = 0;
        while (geohashLength < precision) {
            double mid = 0.0;
            if (isEven) {
                mid = (lngInterval0 + lngInterval1) / 2D;
                if (longitude > mid) {
                    ch |= BITS[bit];
                    lngInterval0 = mid;
                } else {
                    lngInterval1 = mid;
                }
            } else {
                mid = (latInterval0 + latInterval1) / 2D;
                if (latitude > mid) {
                    ch |= BITS[bit];
                    latInterval0 = mid;
                } else {
                    latInterval1 = mid;
                }
            }

            isEven = !isEven;

            if (bit < 4) {
                bit++;
            } else {
                geohashLength++;
                geohash |= ch;
                if (geohashLength < precision) {
                    geohash <<= 5;
                }
                bit = 0;
                ch = 0;
            }
        }
        geohash <<= 4;
        geohash |= precision;
        return geohash;
    }
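The resulting long therefore carries the precision in its lowest four bits, with the 5-bit cell identifiers stacked above it, most significant cell first; roughly:

    // Sketch: layout of the packed long for precision p.
    // [ unused ][ cell 1 (5 bits) ] ... [ cell p (5 bits) ][ precision (4 bits) ]
    long packed = GeoHashUtils.encodeAsLong(57.64911, 10.40744, 2);
    int precision = (int) (packed & 15);   // -> 2
    long cells = packed >>> 4;             // two 5-bit BASE_32 indices, 'u' then '4' from high to low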
    /**
     * Formats a geohash held as a long as a more conventional
     * String-based geohash
     *
     * @param geohashAsLong a geohash encoded as a long
     * @return A traditional base32-based String representation of a geohash
     */
    public static String toString(long geohashAsLong) {
        int precision = (int) (geohashAsLong & 15);
        char[] chars = new char[precision];
        geohashAsLong >>= 4;
        for (int i = precision - 1; i >= 0; i--) {
            chars[i] = BASE_32[(int) (geohashAsLong & 31)];
            geohashAsLong >>= 5;
        }
        return new String(chars);
    }

    public static GeoPoint decode(long geohash) {
        GeoPoint point = new GeoPoint();
        decode(geohash, point);
        return point;
    }

    /**
     * Decodes the given long-format geohash into a latitude and longitude
     *
     * @param geohash long-format geohash to decode
     * @param ret     the GeoPoint into which the latitude and longitude will be stored
     */
    public static void decode(long geohash, GeoPoint ret) {
        double[] interval = decodeCell(geohash);
        ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
    }

    private static double[] decodeCell(long geohash) {
        double[] interval = {-90.0, 90.0, -180.0, 180.0};
        boolean isEven = true;

        int precision = (int) (geohash & 15);
        geohash >>= 4;
        int[] cds = new int[precision];
        for (int i = precision - 1; i >= 0; i--) {
            cds[i] = (int) (geohash & 31);
            geohash >>= 5;
        }

        for (int i = 0; i < cds.length; i++) {
            final int cd = cds[i];
            for (int mask : BITS) {
                if (isEven) {
                    if ((cd & mask) != 0) {
                        interval[2] = (interval[2] + interval[3]) / 2D;
                    } else {
                        interval[3] = (interval[2] + interval[3]) / 2D;
                    }
                } else {
                    if ((cd & mask) != 0) {
                        interval[0] = (interval[0] + interval[1]) / 2D;
                    } else {
                        interval[1] = (interval[0] + interval[1]) / 2D;
                    }
                }
                isEven = !isEven;
            }
        }
        return interval;
    }
}
@ -20,6 +20,10 @@

package org.elasticsearch.common.geo;

import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.XGeoHashUtils;
import org.apache.lucene.util.XGeoUtils;

/**
 *
 */

@ -27,6 +31,7 @@ public final class GeoPoint {

    private double lat;
    private double lon;
    private final static double TOLERANCE = XGeoUtils.TOLERANCE;

    public GeoPoint() {
    }

@ -34,7 +39,7 @@ public final class GeoPoint {

    /**
     * Create a new GeoPoint from a string. This String must either be a geohash
     * or a lat-lon tuple.
     *
     *
     * @param value String to create the point from
     */
    public GeoPoint(String value) {

@ -73,11 +78,22 @@ public final class GeoPoint {

        return this;
    }

    public GeoPoint resetFromGeoHash(String hash) {
        GeoHashUtils.decode(hash, this);
    public GeoPoint resetFromIndexHash(long hash) {
        lon = XGeoUtils.mortonUnhashLon(hash);
        lat = XGeoUtils.mortonUnhashLat(hash);
        return this;
    }

    public GeoPoint resetFromGeoHash(String geohash) {
        final long hash = XGeoHashUtils.mortonEncode(geohash);
        return this.reset(XGeoUtils.mortonUnhashLat(hash), XGeoUtils.mortonUnhashLon(hash));
    }

    public GeoPoint resetFromGeoHash(long geohashLong) {
        final int level = (int) (12 - (geohashLong & 15));
        return this.resetFromIndexHash(BitUtil.flipFlop((geohashLong >>> 4) << ((level * 5) + 2)));
    }
    public final double lat() {
        return this.lat;
    }

@ -95,11 +111,11 @@ public final class GeoPoint {

    }

    public final String geohash() {
        return GeoHashUtils.encode(lat, lon);
        return XGeoHashUtils.stringEncode(lon, lat);
    }

    public final String getGeohash() {
        return GeoHashUtils.encode(lat, lon);
        return XGeoHashUtils.stringEncode(lon, lat);
    }

    @Override

@ -107,10 +123,14 @@ public final class GeoPoint {

        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        GeoPoint geoPoint = (GeoPoint) o;
        final GeoPoint geoPoint = (GeoPoint) o;
        final double lonCompare = geoPoint.lon - lon;
        final double latCompare = geoPoint.lat - lat;

        if (Double.compare(geoPoint.lat, lat) != 0) return false;
        if (Double.compare(geoPoint.lon, lon) != 0) return false;
        if ((lonCompare < -TOLERANCE || lonCompare > TOLERANCE)
                || (latCompare < -TOLERANCE || latCompare > TOLERANCE)) {
            return false;
        }

        return true;
    }

@ -136,4 +156,16 @@ public final class GeoPoint {

        point.resetFromString(latLon);
        return point;
    }
}

    public static GeoPoint fromGeohash(String geohash) {
        return new GeoPoint().resetFromGeoHash(geohash);
    }

    public static GeoPoint fromGeohash(long geohashLong) {
        return new GeoPoint().resetFromGeoHash(geohashLong);
    }

    public static GeoPoint fromIndexLong(long indexLong) {
        return new GeoPoint().resetFromIndexHash(indexLong);
    }
}
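The new static factories make the three hash flavours explicit at call sites; a short usage sketch (the two long-valued variables are illustrative placeholders):

    // Usage of the factories added in this diff:
    GeoPoint fromString = GeoPoint.fromGeohash("u4pruydqqvj");    // base32 string geohash
    GeoPoint fromPacked = GeoPoint.fromGeohash(packedGeohash);    // long with the 4-bit precision suffix
    GeoPoint fromMorton = GeoPoint.fromIndexLong(mortonHash);     // raw morton-encoded index value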
@ -43,12 +43,6 @@ class DefaultConstructionProxyFactory<T> implements ConstructionProxyFactory<T>

        @SuppressWarnings("unchecked") // the injection point is for a constructor of T
        final Constructor<T> constructor = (Constructor<T>) injectionPoint.getMember();

        // Use FastConstructor if the constructor is public.
        if (Modifier.isPublic(constructor.getModifiers())) {
        } else {
            constructor.setAccessible(true);
        }

        return new ConstructionProxy<T>() {
            @Override
            public T newInstance(Object... arguments) throws InvocationTargetException {

@ -57,7 +51,7 @@ class DefaultConstructionProxyFactory<T> implements ConstructionProxyFactory<T>

                } catch (InstantiationException e) {
                    throw new AssertionError(e); // shouldn't happen, we know this is a concrete type
                } catch (IllegalAccessException e) {
                    throw new AssertionError(e); // a security manager is blocking us, we're hosed
                    throw new AssertionError("Wrong access modifiers on " + constructor, e); // a security manager is blocking us, we're hosed
                }
            }
@ -39,9 +39,6 @@ class SingleFieldInjector implements SingleMemberInjector {

        this.injectionPoint = injectionPoint;
        this.field = (Field) injectionPoint.getMember();
        this.dependency = injectionPoint.getDependencies().get(0);

        // Ewwwww...
        field.setAccessible(true);
        factory = injector.getInternalFactory(dependency.getKey(), errors);
    }
@ -49,10 +49,6 @@ class SingleMethodInjector implements SingleMemberInjector {

        if (!Modifier.isPrivate(modifiers) && !Modifier.isProtected(modifiers)) {
        }

        if (!Modifier.isPublic(modifiers)) {
            method.setAccessible(true);
        }

        return new MethodInvoker() {
            @Override
            public Object invoke(Object target, Object... parameters)
@ -88,7 +88,6 @@ class AssistedConstructor<T> {

     * supplied arguments.
     */
    public T newInstance(Object[] args) throws Throwable {
        constructor.setAccessible(true);
        try {
            return constructor.newInstance(args);
        } catch (InvocationTargetException e) {
@ -53,7 +53,7 @@ import static org.elasticsearch.common.inject.internal.Annotations.getKey;

 * @author jessewilson@google.com (Jesse Wilson)
 * @author dtm@google.com (Daniel Martin)
 */
final class FactoryProvider2<F> implements InvocationHandler, Provider<F> {
public final class FactoryProvider2<F> implements InvocationHandler, Provider<F> {

    /**
     * if a factory method parameter isn't annotated, it gets this annotation.

@ -173,7 +173,7 @@ final class FactoryProvider2<F> implements InvocationHandler, Provider<F> {

     * all factory methods will be able to build the target types.
     */
    @Inject
    void initialize(Injector injector) {
    public void initialize(Injector injector) {
        if (this.injector != null) {
            throw new ConfigurationException(Collections.singletonList(new Message(FactoryProvider2.class,
                    "Factories.create() factories may only be used in one Injector!")));
@ -54,8 +54,6 @@ public class ProviderMethod<T> implements ProviderWithDependencies<T> {

        this.method = method;
        this.parameterProviders = parameterProviders;
        this.exposed = method.getAnnotation(Exposed.class) != null;

        method.setAccessible(true);
    }

    public Key<T> getKey() {
@ -17,6 +17,7 @@

package org.elasticsearch.common.inject.multibindings;

import com.google.common.collect.ImmutableSet;

import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.inject.binder.LinkedBindingBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder.RealMultibinder;

@ -227,7 +228,7 @@ public abstract class MapBinder<K, V> {

 * <p/>
 * <p>We use a subclass to hide 'implements Module' from the public API.
 */
private static final class RealMapBinder<K, V> extends MapBinder<K, V> implements Module {
public static final class RealMapBinder<K, V> extends MapBinder<K, V> implements Module {
    private final TypeLiteral<V> valueType;
    private final Key<Map<K, V>> mapKey;
    private final Key<Map<K, Provider<V>>> providerMapKey;
@ -260,8 +261,48 @@ public abstract class MapBinder<K, V> {

                    binder.getProvider(valueKey)));
            return binder.bind(valueKey);
        }

        public static class MapBinderProviderWithDependencies<K, V> implements ProviderWithDependencies<Map<K, Provider<V>>> {
            private Map<K, Provider<V>> providerMap;

            @SuppressWarnings("rawtypes") // code is silly stupid with generics
            private final RealMapBinder binder;
            private final Set<Dependency<?>> dependencies;
            private final Provider<Set<Entry<K, Provider<V>>>> provider;

            @SuppressWarnings("rawtypes") // code is silly stupid with generics
            MapBinderProviderWithDependencies(RealMapBinder binder, Set<Dependency<?>> dependencies, Provider<Set<Entry<K, Provider<V>>>> provider) {
                this.binder = binder;
                this.dependencies = dependencies;
                this.provider = provider;
            }

            @Override
            @SuppressWarnings({"unchecked", "unused"}) // code is silly stupid with generics
            @Inject
            public void initialize() {
                binder.binder = null;

                Map<K, Provider<V>> providerMapMutable = new LinkedHashMap<>();
                for (Entry<K, Provider<V>> entry : provider.get()) {
                    Multibinder.checkConfiguration(providerMapMutable.put(entry.getKey(), entry.getValue()) == null,
                            "Map injection failed due to duplicated key \"%s\"", entry.getKey());
                }

                providerMap = Collections.unmodifiableMap(providerMapMutable);
            }

            @Override
            public Map<K, Provider<V>> get() {
                return providerMap;
            }

            @Override
            public Set<Dependency<?>> getDependencies() {
                return dependencies;
            }
        }

        @Override @SuppressWarnings({"rawtypes", "unchecked"}) // code is silly stupid with generics
        public void configure(Binder binder) {
            Multibinder.checkConfiguration(!isInitialized(), "MapBinder was already initialized");
@ -271,33 +312,7 @@ public abstract class MapBinder<K, V> {

            // binds a Map<K, Provider<V>> from a collection of Map<Entry<K, Provider<V>>
            final Provider<Set<Entry<K, Provider<V>>>> entrySetProvider = binder
                    .getProvider(entrySetBinder.getSetKey());
            binder.bind(providerMapKey).toProvider(new ProviderWithDependencies<Map<K, Provider<V>>>() {
                private Map<K, Provider<V>> providerMap;

                @SuppressWarnings("unused")
                @Inject
                void initialize() {
                    RealMapBinder.this.binder = null;

                    Map<K, Provider<V>> providerMapMutable = new LinkedHashMap<>();
                    for (Entry<K, Provider<V>> entry : entrySetProvider.get()) {
                        Multibinder.checkConfiguration(providerMapMutable.put(entry.getKey(), entry.getValue()) == null,
                                "Map injection failed due to duplicated key \"%s\"", entry.getKey());
                    }

                    providerMap = Collections.unmodifiableMap(providerMapMutable);
                }

                @Override
                public Map<K, Provider<V>> get() {
                    return providerMap;
                }

                @Override
                public Set<Dependency<?>> getDependencies() {
                    return dependencies;
                }
            });
            binder.bind(providerMapKey).toProvider(new MapBinderProviderWithDependencies(RealMapBinder.this, dependencies, entrySetProvider));

            final Provider<Map<K, Provider<V>>> mapProvider = binder.getProvider(providerMapKey);
            binder.bind(mapKey).toProvider(new ProviderWithDependencies<Map<K, V>>() {
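Hoisting the anonymous provider into the named, public MapBinderProviderWithDependencies fits the pattern of the surrounding injector hunks: its @Inject initialize() is now a public method on a public class, so the injector can call it without suppressing access checks. Presumably the goal, like the setAccessible removals above, is to run under a restrictive SecurityManager, where each reflective access would otherwise need a policy grant along the lines of:

    // Policy-file grant that the removed setAccessible calls would otherwise require (illustrative):
    permission java.lang.reflect.ReflectPermission "suppressAccessChecks";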
@ -193,7 +193,7 @@ public abstract class Multibinder<T> {

     * <p>We use a subclass to hide 'implements Module, Provider' from the public
     * API.
     */
    static final class RealMultibinder<T> extends Multibinder<T>
    public static final class RealMultibinder<T> extends Multibinder<T>
            implements Module, Provider<Set<T>>, HasDependencies {

        private final TypeLiteral<T> elementType;

@ -236,7 +236,7 @@ public abstract class Multibinder<T> {

     * contents are only evaluated when get() is invoked.
     */
    @Inject
    void initialize(Injector injector) {
    public void initialize(Injector injector) {
        providers = new ArrayList<>();
        List<Dependency<?>> dependencies = new ArrayList<>();
        for (Binding<?> entry : injector.findBindingsByType(elementType)) {
@ -43,8 +43,8 @@ public final class PathUtils {

    /** the actual JDK default */
    static final FileSystem ACTUAL_DEFAULT = FileSystems.getDefault();

    /** can be changed by tests (via reflection) */
    private static volatile FileSystem DEFAULT = ACTUAL_DEFAULT;
    /** can be changed by tests */
    static volatile FileSystem DEFAULT = ACTUAL_DEFAULT;

    /**
     * Returns a {@code Path} from name components.
@ -64,6 +64,9 @@ public final class AllTermQuery extends Query {

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        if (getBoost() != 1f) {
            return super.rewrite(reader);
        }
        boolean fieldExists = false;
        boolean hasPayloads = false;
        for (LeafReaderContext context : reader.leaves()) {

@ -98,7 +101,7 @@ public final class AllTermQuery extends Query {

        final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
        final TermStatistics termStats = searcher.termStatistics(term, termStates);
        final Similarity similarity = searcher.getSimilarity(needsScores);
        final SimWeight stats = similarity.computeWeight(getBoost(), collectionStats, termStats);
        final SimWeight stats = similarity.computeWeight(collectionStats, termStats);
        return new Weight(this) {

            @Override
@ -120,6 +120,9 @@ public class MultiPhrasePrefixQuery extends Query {

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        if (getBoost() != 1.0F) {
            return super.rewrite(reader);
        }
        if (termArrays.isEmpty()) {
            return new MatchNoDocsQuery();
        }
@ -1,70 +0,0 @@

/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.search.function;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;

/**
 *
 */
@Deprecated
public class BoostScoreFunction extends ScoreFunction {

    public static final String BOOST_WEIGHT_ERROR_MESSAGE = "'boost_factor' and 'weight' cannot be used together. Use 'weight'.";

    private final float boost;

    public BoostScoreFunction(float boost) {
        super(CombineFunction.MULT);
        this.boost = boost;
    }

    public float getBoost() {
        return boost;
    }

    @Override
    public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) {
        return new LeafScoreFunction() {

            @Override
            public double score(int docId, float subQueryScore) {
                return boost;
            }

            @Override
            public Explanation explainScore(int docId, Explanation subQueryScore) {
                return Explanation.match(boost, "static boost factor", Explanation.match(boost, "boostFactor"));
            }
        };
    }

    @Override
    public boolean needsScores() {
        return false;
    }

    @Override
    public String toString() {
        return "boost[" + boost + "]";
    }

}
@ -24,8 +24,8 @@ import org.apache.lucene.search.Explanation;

public enum CombineFunction {
    MULT {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryBoost * queryScore * Math.min(funcScore, maxBoost));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryScore * Math.min(funcScore, maxBoost));
        }

        @Override

@ -34,21 +34,20 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost) * queryExpl.getValue();
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
            Explanation minExpl = Explanation.match(
                    Math.min(funcExpl.getValue(), maxBoost),
                    "min of:",
                    funcExpl, boostExpl);
            return Explanation.match(score, "function score, product of:",
                    queryExpl, minExpl, Explanation.match(queryBoost, "queryBoost"));
            return Explanation.match(queryExpl.getValue() * minExpl.getValue(),
                    "function score, product of:", queryExpl, minExpl);
        }
    },
    REPLACE {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryBoost * Math.min(funcScore, maxBoost));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat(Math.min(funcScore, maxBoost));
        }

        @Override

@ -57,22 +56,19 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = queryBoost * Math.min(funcExpl.getValue(), maxBoost);
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation boostExpl = Explanation.match(maxBoost, "maxBoost");
            Explanation minExpl = Explanation.match(
            return Explanation.match(
                    Math.min(funcExpl.getValue(), maxBoost),
                    "min of:",
                    funcExpl, boostExpl);
            return Explanation.match(score, "function score, product of:",
                    minExpl, Explanation.match(queryBoost, "queryBoost"));
        }

    },
    SUM {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryBoost * (queryScore + Math.min(funcScore, maxBoost)));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryScore + Math.min(funcScore, maxBoost));
        }

        @Override

@ -81,21 +77,18 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = queryBoost * (Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue());
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
                    funcExpl, Explanation.match(maxBoost, "maxBoost"));
            Explanation sumExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of",
            return Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of",
                    queryExpl, minExpl);
            return Explanation.match(score, "function score, product of:",
                    sumExpl, Explanation.match(queryBoost, "queryBoost"));
        }

    },
    AVG {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat((queryBoost * (Math.min(funcScore, maxBoost) + queryScore) / 2.0));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat((Math.min(funcScore, maxBoost) + queryScore) / 2.0);
        }

        @Override

@ -104,22 +97,19 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = toFloat(queryBoost * (queryExpl.getValue() + Math.min(funcExpl.getValue(), maxBoost)) / 2.0);
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:",
                    funcExpl, Explanation.match(maxBoost, "maxBoost"));
            Explanation avgExpl = Explanation.match(
            return Explanation.match(
                    toFloat((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of",
                    queryExpl, minExpl);
            return Explanation.match(score, "function score, product of:",
                    avgExpl, Explanation.match(queryBoost, "queryBoost"));
        }

    },
    MIN {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryBoost * Math.min(queryScore, Math.min(funcScore, maxBoost)));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat(Math.min(queryScore, Math.min(funcScore, maxBoost)));
        }

        @Override

@ -128,23 +118,20 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = toFloat(queryBoost * Math.min(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation innerMinExpl = Explanation.match(
                    Math.min(funcExpl.getValue(), maxBoost), "min of:",
                    funcExpl, Explanation.match(maxBoost, "maxBoost"));
            Explanation outerMinExpl = Explanation.match(
            return Explanation.match(
                    Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of",
                    queryExpl, innerMinExpl);
            return Explanation.match(score, "function score, product of:",
                    outerMinExpl, Explanation.match(queryBoost, "queryBoost"));
        }

    },
    MAX {
        @Override
        public float combine(double queryBoost, double queryScore, double funcScore, double maxBoost) {
            return toFloat(queryBoost * (Math.max(queryScore, Math.min(funcScore, maxBoost))));
        public float combine(double queryScore, double funcScore, double maxBoost) {
            return toFloat(Math.max(queryScore, Math.min(funcScore, maxBoost)));
        }

        @Override

@ -153,21 +140,18 @@ public enum CombineFunction {

        }

        @Override
        public Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            float score = toFloat(queryBoost * Math.max(queryExpl.getValue(), Math.min(funcExpl.getValue(), maxBoost)));
        public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) {
            Explanation innerMinExpl = Explanation.match(
                    Math.min(funcExpl.getValue(), maxBoost), "min of:",
                    funcExpl, Explanation.match(maxBoost, "maxBoost"));
            Explanation outerMaxExpl = Explanation.match(
            return Explanation.match(
                    Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of:",
                    queryExpl, innerMinExpl);
            return Explanation.match(score, "function score, product of:",
                    outerMaxExpl, Explanation.match(queryBoost, "queryBoost"));
        }

    };

    public abstract float combine(double queryBoost, double queryScore, double funcScore, double maxBoost);
    public abstract float combine(double queryScore, double funcScore, double maxBoost);

    public abstract String getName();

@ -181,5 +165,5 @@ public enum CombineFunction {

        return Double.compare(floatVersion, input) == 0 || input == 0.0d ? 0 : 1.d - (floatVersion) / input;
    }

    public abstract Explanation explain(float queryBoost, Explanation queryExpl, Explanation funcExpl, float maxBoost);
    public abstract Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost);
}
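With queryBoost gone from the signatures, each mode now combines just the query score with the function score capped at maxBoost. A few concrete values under the new combine, following the code above:

    // Sketch: the new combine semantics (function score capped at maxBoost).
    float mult = CombineFunction.MULT.combine(2.0, 3.0, 10.0);   // 2.0 * min(3.0, 10.0) = 6.0
    float sum  = CombineFunction.SUM.combine(2.0, 3.0, 10.0);    // 2.0 + min(3.0, 10.0) = 5.0
    float avg  = CombineFunction.AVG.combine(2.0, 30.0, 10.0);   // (min(30.0, 10.0) + 2.0) / 2 = 6.0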
@ -21,13 +21,11 @@ package org.elasticsearch.common.lucene.search.function;

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;

abstract class CustomBoostFactorScorer extends Scorer {

    final float subQueryBoost;
    final Scorer scorer;
    final float maxBoost;
    final CombineFunction scoreCombiner;

@ -43,7 +41,6 @@ abstract class CustomBoostFactorScorer extends Scorer {

        } else {
            nextDoc = new MinScoreNextDoc();
        }
        this.subQueryBoost = w.getQuery().getBoost();
        this.scorer = scorer;
        this.maxBoost = maxBoost;
        this.scoreCombiner = scoreCombiner;
@ -114,6 +114,9 @@ public class FiltersFunctionScoreQuery extends Query {

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        if (getBoost() != 1.0F) {
            return super.rewrite(reader);
        }
        Query newQ = subQuery.rewrite(reader);
        if (newQ == subQuery)
            return this;

@ -158,14 +161,12 @@ public class FiltersFunctionScoreQuery extends Query {

        @Override
        public float getValueForNormalization() throws IOException {
            float sum = subQueryWeight.getValueForNormalization();
            sum *= getBoost() * getBoost();
            return sum;
            return subQueryWeight.getValueForNormalization();
        }

        @Override
        public void normalize(float norm, float topLevelBoost) {
            subQueryWeight.normalize(norm, topLevelBoost * getBoost());
        public void normalize(float norm, float boost) {
            subQueryWeight.normalize(norm, boost);
        }

        @Override

@ -219,10 +220,7 @@ public class FiltersFunctionScoreQuery extends Query {

            }
        }
        if (filterExplanations.size() == 0) {
            float sc = getBoost() * subQueryExpl.getValue();
            return Explanation.match(sc, "function score, no filter match, product of:",
                    subQueryExpl,
                    Explanation.match(getBoost(), "queryBoost"));
            return subQueryExpl;
        }

        // Second: Compute the factor that would have been computed by the

@ -266,7 +264,7 @@ public class FiltersFunctionScoreQuery extends Query {

                CombineFunction.toFloat(factor),
                "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
                filterExplanations);
        return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost);
        return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost);
    }
}

@ -348,7 +346,7 @@ public class FiltersFunctionScoreQuery extends Query {

                }
            }
        }
        return scoreCombiner.combine(subQueryBoost, subQueryScore, factor, maxBoost);
        return scoreCombiner.combine(subQueryScore, factor, maxBoost);
    }
}
|
@ -76,6 +76,9 @@ public class FunctionScoreQuery extends Query {
|
||||
|
||||
@Override
|
||||
public Query rewrite(IndexReader reader) throws IOException {
|
||||
if (getBoost() != 1.0F) {
|
||||
return super.rewrite(reader);
|
||||
}
|
||||
Query newQ = subQuery.rewrite(reader);
|
||||
if (newQ == subQuery) {
|
||||
return this;
|
||||
@ -117,14 +120,12 @@ public class FunctionScoreQuery extends Query {
|
||||
|
||||
@Override
|
||||
public float getValueForNormalization() throws IOException {
|
||||
float sum = subQueryWeight.getValueForNormalization();
|
||||
sum *= getBoost() * getBoost();
|
||||
return sum;
|
||||
return subQueryWeight.getValueForNormalization();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void normalize(float norm, float topLevelBoost) {
|
||||
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
|
||||
public void normalize(float norm, float boost) {
|
||||
subQueryWeight.normalize(norm, boost);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -148,7 +149,7 @@ public class FunctionScoreQuery extends Query {
|
||||
}
|
||||
if (function != null) {
|
||||
Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
|
||||
return combineFunction.explain(getBoost(), subQueryExpl, functionExplanation, maxBoost);
|
||||
return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
|
||||
} else {
|
||||
return subQueryExpl;
|
||||
}
|
||||
@ -174,9 +175,9 @@ public class FunctionScoreQuery extends Query {
|
||||
// are needed
|
||||
float score = needsScores ? scorer.score() : 0f;
|
||||
if (function == null) {
|
||||
return subQueryBoost * score;
|
||||
return score;
|
||||
} else {
|
||||
return scoreCombiner.combine(subQueryBoost, score,
|
||||
return scoreCombiner.combine(score,
|
||||
function.score(scorer.docID(), score), maxBoost);
|
||||
}
|
||||
}
|
||||
|
@ -35,9 +35,6 @@ public class WeightFactorFunction extends ScoreFunction {

    public WeightFactorFunction(float weight, ScoreFunction scoreFunction) {
        super(CombineFunction.MULT);
        if (scoreFunction instanceof BoostScoreFunction) {
            throw new IllegalArgumentException(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE);
        }
        if (scoreFunction == null) {
            this.scoreFunction = SCORE_ONE;
        } else {
Some files were not shown because too many files have changed in this diff.