Merge remote-tracking branch 'es/master' into feature/ingest

Martijn van Groningen committed on 2015-10-29 12:46:02 +07:00
commit b0836d5d07
214 changed files with 2282 additions and 3145 deletions

View File

@@ -214,7 +214,12 @@ mvn test -Dtests.heap.size=512m
 Pass arbitrary jvm arguments.
 ------------------------------
+# specify heap dump path
 mvn test -Dtests.jvm.argline="-XX:HeapDumpPath=/path/to/heapdumps"
+# enable gc logging
+mvn test -Dtests.jvm.argline="-verbose:gc"
+# enable security debugging
+mvn test -Dtests.jvm.argline="-Djava.security.debug=access,failure"
 ------------------------------
 == Backwards Compatibility Tests

View File

@@ -236,7 +236,7 @@
 <includes>
 <include>org/elasticsearch/test/**/*</include>
 <include>org/elasticsearch/bootstrap/BootstrapForTesting.class</include>
-<include>org/elasticsearch/bootstrap/MockPluginPolicy.class</include>
+<include>org/elasticsearch/bootstrap/BootstrapForTesting$*.class</include>
 <include>org/elasticsearch/common/cli/CliToolTestCase.class</include>
 <include>org/elasticsearch/common/cli/CliToolTestCase$*.class</include>
 </includes>
@@ -265,7 +265,7 @@
 <include>rest-api-spec/**/*</include>
 <include>org/elasticsearch/test/**/*</include>
 <include>org/elasticsearch/bootstrap/BootstrapForTesting.class</include>
-<include>org/elasticsearch/bootstrap/MockPluginPolicy.class</include>
+<include>org/elasticsearch/bootstrap/BootstrapForTesting$*.class</include>
 <include>org/elasticsearch/common/cli/CliToolTestCase.class</include>
 <include>org/elasticsearch/common/cli/CliToolTestCase$*.class</include>
 <include>org/elasticsearch/cluster/MockInternalClusterInfoService.class</include>

View File

@@ -1,279 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.util;
import java.util.ArrayList;
import java.util.Collection;
/**
* Utilities for converting to/from the GeoHash standard
*
* The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
* representing the level (1-12) [xyxy...xyxyllll]
*
* This differs from a morton encoded value which interleaves lat/lon (y/x).
*
* @lucene.experimental
*/
public class XGeoHashUtils {
public static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
'7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
public static final String BASE_32_STRING = new String(BASE_32);
public static final int PRECISION = 12;
private static final short MORTON_OFFSET = (XGeoUtils.BITS<<1) - (PRECISION*5);
/**
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final double lon, final double lat, final int level) {
// shift to appropriate level
final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
return ((BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
}
/**
* Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/
public static final long longEncode(final String hash) {
int level = hash.length()-1;
long b;
long l = 0L;
for(char c : hash.toCharArray()) {
b = (long)(BASE_32_STRING.indexOf(c));
l |= (b<<(level--*5));
}
return (l<<4)|hash.length();
}
/**
* Encode an existing geohash long to the provided precision
*/
public static long longEncode(long geohash, int level) {
final short precision = (short)(geohash & 15);
if (precision == level) {
return geohash;
} else if (precision > level) {
return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
}
return ((geohash >>> 4) << (((level - precision) * 5) + 4) | level);
}
/**
* Encode to a geohash string from the geohash based long format
*/
public static final String stringEncode(long geoHashLong) {
int level = (int)geoHashLong&15;
geoHashLong >>>= 4;
char[] chars = new char[level];
do {
chars[--level] = BASE_32[(int)(geoHashLong&31L)];
geoHashLong>>>=5;
} while(level > 0);
return new String(chars);
}
/**
* Encode to a geohash string from full resolution longitude, latitude
*/
public static final String stringEncode(final double lon, final double lat) {
return stringEncode(lon, lat, 12);
}
/**
* Encode to a level specific geohash string from full resolution longitude, latitude
*/
public static final String stringEncode(final double lon, final double lat, final int level) {
// bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
final long hashedVal = BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat));
StringBuilder geoHash = new StringBuilder();
short precision = 0;
final short msf = (XGeoUtils.BITS<<1)-5;
long mask = 31L<<msf;
do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
// next 5 bits
mask >>>= 5;
} while (++precision < level);
return geoHash.toString();
}
/**
* Encode to a full precision geohash string from a given morton encoded long value
*/
public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
return stringEncode(hashedVal, PRECISION);
}
/**
* Encode to a geohash string at a given level from a morton long
*/
public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
// bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
hashedVal = BitUtil.flipFlop(hashedVal);
StringBuilder geoHash = new StringBuilder();
short precision = 0;
final short msf = (XGeoUtils.BITS<<1)-5;
long mask = 31L<<msf;
do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
// next 5 bits
mask >>>= 5;
} while (++precision < level);
return geoHash.toString();
}
/**
* Encode to a morton long value from a given geohash string
*/
public static final long mortonEncode(final String hash) {
int level = 11;
long b;
long l = 0L;
for(char c : hash.toCharArray()) {
b = (long)(BASE_32_STRING.indexOf(c));
l |= (b<<((level--*5) + MORTON_OFFSET));
}
return BitUtil.flipFlop(l);
}
/**
* Encode to a morton long value from a given geohash long value
*/
public static final long mortonEncode(final long geoHashLong) {
final int level = (int)(geoHashLong&15);
final short odd = (short)(level & 1);
return BitUtil.flipFlop((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd));
}
private static final char encode(int x, int y) {
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
}
/**
* Calculate all neighbors of a given geohash cell.
*
* @param geohash Geohash of the defined cell
* @return geohashes of all neighbor cells
*/
public static Collection<? extends CharSequence> neighbors(String geohash) {
return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
}
/**
* Calculate the geohash of a neighbor of a geohash
*
* @param geohash the geohash of a cell
* @param level level of the geohash
* @param dx delta of the first grid coordinate (must be -1, 0 or +1)
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
* @return geohash of the defined cell
*/
private final static String neighbor(String geohash, int level, int dx, int dy) {
int cell = BASE_32_STRING.indexOf(geohash.charAt(level -1));
// Decoding the Geohash bit pattern to determine grid coordinates
int x0 = cell & 1; // first bit of x
int y0 = cell & 2; // first bit of y
int x1 = cell & 4; // second bit of x
int y1 = cell & 8; // second bit of y
int x2 = cell & 16; // third bit of x
// combine the bitpattern to grid coordinates.
// note that the semantics of x and y are swapping
// on each level
int x = x0 + (x1 / 2) + (x2 / 4);
int y = (y0 / 2) + (y1 / 4);
if (level == 1) {
// Root cells at north (namely "bcfguvyz") or at
// south (namely "0145hjnp") do not have neighbors
// in north/south direction
if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
return null;
} else {
return Character.toString(encode(x + dx, y + dy));
}
} else {
// define grid coordinates for next level
final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);
// if the defined neighbor has the same parent as the current cell
// encode the cell directly. Otherwise find the cell next to this
// cell recursively. Since encoding wraps around within a cell
// it can be encoded here.
// xLimit and yLimit must always be 7 and 3 respectively,
// since x and y semantics swap on each level.
if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
return geohash.substring(0, level - 1) + encode(nx, ny);
} else {
String neighbor = neighbor(geohash, level - 1, dx, dy);
return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
}
}
}
/**
* Add all geohashes of the cells next to a given geohash to a list.
*
* @param geohash Geohash of a specified cell
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
return addNeighbors(geohash, geohash.length(), neighbors);
}
/**
* Add all geohashes of the cells next to a given geohash to a list.
*
* @param geohash Geohash of a specified cell
* @param length level of the given geohash
* @param neighbors list to add the neighbors to
* @return the given list
*/
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
String south = neighbor(geohash, length, 0, -1);
String north = neighbor(geohash, length, 0, +1);
if (north != null) {
neighbors.add(neighbor(north, length, -1, 0));
neighbors.add(north);
neighbors.add(neighbor(north, length, +1, 0));
}
neighbors.add(neighbor(geohash, length, -1, 0));
neighbors.add(neighbor(geohash, length, +1, 0));
if (south != null) {
neighbors.add(neighbor(south, length, -1, 0));
neighbors.add(south);
neighbors.add(neighbor(south, length, +1, 0));
}
return neighbors;
}
}
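
For reference, a minimal usage sketch of the deleted class above (a hypothetical example, assuming only the XGeoHashUtils methods shown in this file; the coordinates and the expected cell are illustrative):

public class XGeoHashUtilsExample {
    public static void main(String[] args) {
        // encode lon/lat at level 5; the 4 least significant bits carry the level
        long hash = XGeoHashUtils.longEncode(-0.1275, 51.5072, 5);
        int level = (int) (hash & 15);                   // == 5
        String cell = XGeoHashUtils.stringEncode(hash);  // e.g. "gcpvj"
        // the string and long formats round-trip through longEncode(String)
        assert XGeoHashUtils.longEncode(cell) == hash;
        // up to 8 neighboring cells at the same level
        System.out.println(level + " " + cell + " " + XGeoHashUtils.neighbors(cell));
    }
}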

View File

@@ -1,383 +0,0 @@
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Reusable geo-spatial projection utility methods.
*
* @lucene.experimental
*/
public class XGeoProjectionUtils {
// WGS84 earth-ellipsoid major (a) minor (b) radius, (f) flattening and eccentricity (e)
static final double SEMIMAJOR_AXIS = 6_378_137; // [m]
static final double FLATTENING = 1.0/298.257223563;
static final double SEMIMINOR_AXIS = SEMIMAJOR_AXIS * (1.0 - FLATTENING); //6_356_752.31420; // [m]
static final double ECCENTRICITY = StrictMath.sqrt((2.0 - FLATTENING) * FLATTENING);
static final double PI_OVER_2 = StrictMath.PI / 2.0D;
static final double SEMIMAJOR_AXIS2 = SEMIMAJOR_AXIS * SEMIMAJOR_AXIS;
static final double SEMIMINOR_AXIS2 = SEMIMINOR_AXIS * SEMIMINOR_AXIS;
/**
* Converts from geocentric earth-centered earth-fixed to geodesic lat/lon/alt
* @param x Cartesian x coordinate
* @param y Cartesian y coordinate
* @param z Cartesian z coordinate
* @param lla 0: longitude, 1: latitude, 2: altitude
* @return double array as 0: longitude 1: latitude 2: altitude
*/
public static final double[] ecfToLLA(final double x, final double y, final double z, double[] lla) {
boolean atPole = false;
final double ad_c = 1.0026000D;
final double e2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
final double ep2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMINOR_AXIS2);
final double cos67P5 = 0.38268343236508977D;
if (lla == null) {
lla = new double[3];
}
if (x != 0.0) {
lla[0] = StrictMath.atan2(y,x);
} else {
if (y > 0) {
lla[0] = PI_OVER_2;
} else if (y < 0) {
lla[0] = -PI_OVER_2;
} else {
atPole = true;
lla[0] = 0.0D;
if (z > 0.0) {
lla[1] = PI_OVER_2;
} else if (z < 0.0) {
lla[1] = -PI_OVER_2;
} else {
lla[1] = PI_OVER_2;
lla[2] = -SEMIMINOR_AXIS;
return lla;
}
}
}
final double w2 = x*x + y*y;
final double w = StrictMath.sqrt(w2);
final double t0 = z * ad_c;
final double s0 = StrictMath.sqrt(t0 * t0 + w2);
final double sinB0 = t0 / s0;
final double cosB0 = w / s0;
final double sin3B0 = sinB0 * sinB0 * sinB0;
final double t1 = z + SEMIMINOR_AXIS * ep2 * sin3B0;
final double sum = w - SEMIMAJOR_AXIS * e2 * cosB0 * cosB0 * cosB0;
final double s1 = StrictMath.sqrt(t1 * t1 + sum * sum);
final double sinP1 = t1 / s1;
final double cosP1 = sum / s1;
final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - e2 * sinP1 * sinP1);
if (cosP1 >= cos67P5) {
lla[2] = w / cosP1 - rn;
} else if (cosP1 <= -cos67P5) {
lla[2] = w / -cosP1 - rn;
} else {
lla[2] = z / sinP1 + rn * (e2 - 1.0);
}
if (!atPole) {
lla[1] = StrictMath.atan(sinP1/cosP1);
}
lla[0] = StrictMath.toDegrees(lla[0]);
lla[1] = StrictMath.toDegrees(lla[1]);
return lla;
}
/**
* Converts from geodesic lon lat alt to geocentric earth-centered earth-fixed
* @param lon geodesic longitude
* @param lat geodesic latitude
* @param alt geodesic altitude
* @param ecf reusable earth-centered earth-fixed result
* @return either a new ecef array or the reusable ecf parameter
*/
public static final double[] llaToECF(double lon, double lat, double alt, double[] ecf) {
lon = StrictMath.toRadians(lon);
lat = StrictMath.toRadians(lat);
final double sl = StrictMath.sin(lat);
final double s2 = sl*sl;
final double cl = StrictMath.cos(lat);
final double ge2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
if (ecf == null) {
ecf = new double[3];
}
if (lat < -PI_OVER_2 && lat > -1.001D * PI_OVER_2) {
lat = -PI_OVER_2;
} else if (lat > PI_OVER_2 && lat < 1.001D * PI_OVER_2) {
lat = PI_OVER_2;
}
assert (lat >= -PI_OVER_2) && (lat <= PI_OVER_2);
if (lon > StrictMath.PI) {
lon -= (2*StrictMath.PI);
}
final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - ge2 * s2);
ecf[0] = (rn+alt) * cl * StrictMath.cos(lon);
ecf[1] = (rn+alt) * cl * StrictMath.sin(lon);
ecf[2] = ((rn*(1.0-ge2))+alt)*sl;
return ecf;
}
/**
* Converts from lat lon alt (in degrees) to East North Up right-hand coordinate system
* @param lon longitude in degrees
* @param lat latitude in degrees
* @param alt altitude in meters
* @param centerLon reference point longitude in degrees
* @param centerLat reference point latitude in degrees
* @param centerAlt reference point altitude in meters
* @param enu result east, north, up coordinate
* @return east, north, up coordinate
*/
public static double[] llaToENU(final double lon, final double lat, final double alt, double centerLon,
double centerLat, final double centerAlt, double[] enu) {
if (enu == null) {
enu = new double[3];
}
// convert point to ecf coordinates
final double[] ecf = llaToECF(lon, lat, alt, null);
// convert from ecf to enu
return ecfToENU(ecf[0], ecf[1], ecf[2], centerLon, centerLat, centerAlt, enu);
}
/**
* Converts from East North Up right-hand rule to lat lon alt in degrees
* @param x easting (in meters)
* @param y northing (in meters)
* @param z up (in meters)
* @param centerLon reference point longitude (in degrees)
* @param centerLat reference point latitude (in degrees)
* @param centerAlt reference point altitude (in meters)
* @param lla resulting lat, lon, alt point (in degrees)
* @return lat, lon, alt point (in degrees)
*/
public static double[] enuToLLA(final double x, final double y, final double z, final double centerLon,
final double centerLat, final double centerAlt, double[] lla) {
// convert enuToECF
if (lla == null) {
lla = new double[3];
}
// convert enuToECF, storing intermediate result in lla
lla = enuToECF(x, y, z, centerLon, centerLat, centerAlt, lla);
// convert ecf to LLA
return ecfToLLA(lla[0], lla[1], lla[2], lla);
}
/**
* Convert from Earth-Centered-Fixed to Easting, Northing, Up Right Hand System
* @param x ECF X coordinate (in meters)
* @param y ECF Y coordinate (in meters)
* @param z ECF Z coordinate (in meters)
* @param centerLon ENU origin longitude (in degrees)
* @param centerLat ENU origin latitude (in degrees)
* @param centerAlt ENU altitude (in meters)
* @param enu reusable enu result
* @return Easting, Northing, Up coordinate
*/
public static double[] ecfToENU(double x, double y, double z, final double centerLon,
final double centerLat, final double centerAlt, double[] enu) {
if (enu == null) {
enu = new double[3];
}
// create rotation matrix and rotate to enu orientation
final double[][] phi = createPhiTransform(centerLon, centerLat, null);
// convert origin to ENU
final double[] originECF = llaToECF(centerLon, centerLat, centerAlt, null);
final double[] originENU = new double[3];
originENU[0] = ((phi[0][0] * originECF[0]) + (phi[0][1] * originECF[1]) + (phi[0][2] * originECF[2]));
originENU[1] = ((phi[1][0] * originECF[0]) + (phi[1][1] * originECF[1]) + (phi[1][2] * originECF[2]));
originENU[2] = ((phi[2][0] * originECF[0]) + (phi[2][1] * originECF[1]) + (phi[2][2] * originECF[2]));
// rotate then translate
enu[0] = ((phi[0][0] * x) + (phi[0][1] * y) + (phi[0][2] * z)) - originENU[0];
enu[1] = ((phi[1][0] * x) + (phi[1][1] * y) + (phi[1][2] * z)) - originENU[1];
enu[2] = ((phi[2][0] * x) + (phi[2][1] * y) + (phi[2][2] * z)) - originENU[2];
return enu;
}
/**
* Convert from Easting, Northing, Up Right-Handed system to Earth Centered Fixed system
* @param x ENU x coordinate (in meters)
* @param y ENU y coordinate (in meters)
* @param z ENU z coordinate (in meters)
* @param centerLon ENU origin longitude (in degrees)
* @param centerLat ENU origin latitude (in degrees)
* @param centerAlt ENU origin altitude (in meters)
* @param ecf reusable ecf result
* @return ecf result coordinate
*/
public static double[] enuToECF(final double x, final double y, final double z, double centerLon,
double centerLat, final double centerAlt, double[] ecf) {
if (ecf == null) {
ecf = new double[3];
}
double[][] phi = createTransposedPhiTransform(centerLon, centerLat, null);
double[] ecfOrigin = llaToECF(centerLon, centerLat, centerAlt, null);
// rotate and translate
ecf[0] = (phi[0][0]*x + phi[0][1]*y + phi[0][2]*z) + ecfOrigin[0];
ecf[1] = (phi[1][0]*x + phi[1][1]*y + phi[1][2]*z) + ecfOrigin[1];
ecf[2] = (phi[2][0]*x + phi[2][1]*y + phi[2][2]*z) + ecfOrigin[2];
return ecf;
}
/**
* Create the rotation matrix for converting Earth Centered Fixed to Easting Northing Up
* @param originLon ENU origin longitude (in degrees)
* @param originLat ENU origin latitude (in degrees)
* @param phiMatrix reusable phi matrix result
* @return phi rotation matrix
*/
private static double[][] createPhiTransform(double originLon, double originLat, double[][] phiMatrix) {
if (phiMatrix == null) {
phiMatrix = new double[3][3];
}
originLon = StrictMath.toRadians(originLon);
originLat = StrictMath.toRadians(originLat);
final double sLon = StrictMath.sin(originLon);
final double cLon = StrictMath.cos(originLon);
final double sLat = StrictMath.sin(originLat);
final double cLat = StrictMath.cos(originLat);
phiMatrix[0][0] = -sLon;
phiMatrix[0][1] = cLon;
phiMatrix[0][2] = 0.0D;
phiMatrix[1][0] = -sLat * cLon;
phiMatrix[1][1] = -sLat * sLon;
phiMatrix[1][2] = cLat;
phiMatrix[2][0] = cLat * cLon;
phiMatrix[2][1] = cLat * sLon;
phiMatrix[2][2] = sLat;
return phiMatrix;
}
/**
* Create the transposed rotation matrix for converting Easting Northing Up coordinates to Earth Centered Fixed
* @param originLon ENU origin longitude (in degrees)
* @param originLat ENU origin latitude (in degrees)
* @param phiMatrix reusable phi rotation matrix result
* @return transposed phi rotation matrix
*/
private static double[][] createTransposedPhiTransform(double originLon, double originLat, double[][] phiMatrix) {
if (phiMatrix == null) {
phiMatrix = new double[3][3];
}
originLon = StrictMath.toRadians(originLon);
originLat = StrictMath.toRadians(originLat);
final double sLat = StrictMath.sin(originLat);
final double cLat = StrictMath.cos(originLat);
final double sLon = StrictMath.sin(originLon);
final double cLon = StrictMath.cos(originLon);
phiMatrix[0][0] = -sLon;
phiMatrix[1][0] = cLon;
phiMatrix[2][0] = 0.0D;
phiMatrix[0][1] = -sLat * cLon;
phiMatrix[1][1] = -sLat * sLon;
phiMatrix[2][1] = cLat;
phiMatrix[0][2] = cLat * cLon;
phiMatrix[1][2] = cLat * sLon;
phiMatrix[2][2] = sLat;
return phiMatrix;
}
/**
* Finds a point along a bearing from a given lon,lat geolocation using Vincenty's distance formula
*
* @param lon origin longitude in degrees
* @param lat origin latitude in degrees
* @param bearing azimuthal bearing in degrees
* @param dist distance in meters
* @param pt resulting point
* @return the point along a bearing at a given distance in meters
*/
public static final double[] pointFromLonLatBearing(double lon, double lat, double bearing, double dist, double[] pt) {
if (pt == null) {
pt = new double[2];
}
final double alpha1 = StrictMath.toRadians(bearing);
final double cosA1 = StrictMath.cos(alpha1);
final double sinA1 = StrictMath.sin(alpha1);
final double tanU1 = (1-FLATTENING) * StrictMath.tan(StrictMath.toRadians(lat));
final double cosU1 = 1 / StrictMath.sqrt((1+tanU1*tanU1));
final double sinU1 = tanU1*cosU1;
final double sig1 = StrictMath.atan2(tanU1, cosA1);
final double sinAlpha = cosU1 * sinA1;
final double cosSqAlpha = 1 - sinAlpha*sinAlpha;
final double uSq = cosSqAlpha * (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2) / SEMIMINOR_AXIS2;
final double A = 1 + uSq/16384D*(4096D + uSq * (-768D + uSq * (320D - 175D*uSq)));
final double B = uSq/1024D * (256D + uSq * (-128D + uSq * (74D - 47D * uSq)));
double sigma = dist / (SEMIMINOR_AXIS*A);
double sigmaP;
double sinSigma, cosSigma, cos2SigmaM, deltaSigma;
do {
cos2SigmaM = StrictMath.cos(2*sig1 + sigma);
sinSigma = StrictMath.sin(sigma);
cosSigma = StrictMath.cos(sigma);
deltaSigma = B * sinSigma * (cos2SigmaM + (B/4D) * (cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)-
(B/6) * cos2SigmaM*(-3+4*sinSigma*sinSigma)*(-3+4*cos2SigmaM*cos2SigmaM)));
sigmaP = sigma;
sigma = dist / (SEMIMINOR_AXIS*A) + deltaSigma;
} while (StrictMath.abs(sigma-sigmaP) > 1E-12);
final double tmp = sinU1*sinSigma - cosU1*cosSigma*cosA1;
final double lat2 = StrictMath.atan2(sinU1*cosSigma + cosU1*sinSigma*cosA1,
(1-FLATTENING) * StrictMath.sqrt(sinAlpha*sinAlpha + tmp*tmp));
final double lambda = StrictMath.atan2(sinSigma*sinA1, cosU1*cosSigma - sinU1*sinSigma*cosA1);
final double c = FLATTENING/16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha));
final double lam = lambda - (1-c) * FLATTENING * sinAlpha *
(sigma + c * sinSigma * (cos2SigmaM + c * cosSigma * (-1 + 2* cos2SigmaM*cos2SigmaM)));
pt[0] = lon + StrictMath.toDegrees(lam);
pt[1] = StrictMath.toDegrees(lat2);
return pt;
}
}
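
For reference, a hypothetical round-trip sketch for the projection utilities above (coordinates are illustrative; ecfToLLA is approximate by construction, so equality only holds within a tolerance):

public class XGeoProjectionUtilsExample {
    public static void main(String[] args) {
        // geodesic lon/lat (in degrees) and altitude (in meters) to earth-centered earth-fixed
        double[] ecf = XGeoProjectionUtils.llaToECF(2.2945, 48.8584, 35.0, null);
        // and back again; passing null allocates a fresh result array
        double[] lla = XGeoProjectionUtils.ecfToLLA(ecf[0], ecf[1], ecf[2], null);
        // lla[0] ~ 2.2945 (lon), lla[1] ~ 48.8584 (lat), lla[2] ~ 35.0 (alt)
        System.out.printf("lon=%f lat=%f alt=%f%n", lla[0], lla[1], lla[2]);
    }
}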

View File

@@ -1,429 +0,0 @@
package org.apache.lucene.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
/**
* Basic reusable geo-spatial utility methods
*
* @lucene.experimental
*/
public final class XGeoUtils {
private static final short MIN_LON = -180;
private static final short MIN_LAT = -90;
public static final short BITS = 31;
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
public static final double TOLERANCE = 1E-5;
/** Minimum longitude value. */
public static final double MIN_LON_INCL = -180.0D;
/** Maximum longitude value. */
public static final double MAX_LON_INCL = 180.0D;
/** Minimum latitude value. */
public static final double MIN_LAT_INCL = -90.0D;
/** Maximum latitude value. */
public static final double MAX_LAT_INCL = 90.0D;
// magic numbers for bit interleaving
private static final long MAGIC[] = {
0x5555555555555555L, 0x3333333333333333L,
0x0F0F0F0F0F0F0F0FL, 0x00FF00FF00FF00FFL,
0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL,
0xAAAAAAAAAAAAAAAAL
};
// shift values for bit interleaving
private static final short SHIFT[] = {1, 2, 4, 8, 16};
public static double LOG2 = StrictMath.log(2);
// No instance:
private XGeoUtils() {
}
public static Long mortonHash(final double lon, final double lat) {
return interleave(scaleLon(lon), scaleLat(lat));
}
public static double mortonUnhashLon(final long hash) {
return unscaleLon(deinterleave(hash));
}
public static double mortonUnhashLat(final long hash) {
return unscaleLat(deinterleave(hash >>> 1));
}
private static long scaleLon(final double val) {
return (long) ((val-MIN_LON) * LON_SCALE);
}
private static long scaleLat(final double val) {
return (long) ((val-MIN_LAT) * LAT_SCALE);
}
private static double unscaleLon(final long val) {
return (val / LON_SCALE) + MIN_LON;
}
private static double unscaleLat(final long val) {
return (val / LAT_SCALE) + MIN_LAT;
}
/**
* Interleaves the first 32 bits of each long value
*
* Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
*/
public static long interleave(long v1, long v2) {
v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
v1 = (v1 | (v1 << SHIFT[1])) & MAGIC[1];
v1 = (v1 | (v1 << SHIFT[0])) & MAGIC[0];
v2 = (v2 | (v2 << SHIFT[4])) & MAGIC[4];
v2 = (v2 | (v2 << SHIFT[3])) & MAGIC[3];
v2 = (v2 | (v2 << SHIFT[2])) & MAGIC[2];
v2 = (v2 | (v2 << SHIFT[1])) & MAGIC[1];
v2 = (v2 | (v2 << SHIFT[0])) & MAGIC[0];
return (v2<<1) | v1;
}
/**
* Deinterleaves long value back to two concatenated 32bit values
*/
public static long deinterleave(long b) {
b &= MAGIC[0];
b = (b ^ (b >>> SHIFT[0])) & MAGIC[1];
b = (b ^ (b >>> SHIFT[1])) & MAGIC[2];
b = (b ^ (b >>> SHIFT[2])) & MAGIC[3];
b = (b ^ (b >>> SHIFT[3])) & MAGIC[4];
b = (b ^ (b >>> SHIFT[4])) & MAGIC[5];
return b;
}
public static double compare(final double v1, final double v2) {
final double compare = v1-v2;
return Math.abs(compare) <= TOLERANCE ? 0 : compare;
}
/**
* Puts longitude in range of -180 to +180.
*/
public static double normalizeLon(double lon_deg) {
if (lon_deg >= -180 && lon_deg <= 180) {
return lon_deg; //common case, and avoids slight double precision shifting
}
double off = (lon_deg + 180) % 360;
if (off < 0) {
return 180 + off;
} else if (off == 0 && lon_deg > 0) {
return 180;
} else {
return -180 + off;
}
}
/**
* Puts latitude in range of -90 to 90.
*/
public static double normalizeLat(double lat_deg) {
if (lat_deg >= -90 && lat_deg <= 90) {
return lat_deg; //common case, and avoids slight double precision shifting
}
double off = Math.abs((lat_deg + 90) % 360);
return (off <= 180 ? off : 360-off) - 90;
}
public static final boolean bboxContains(final double lon, final double lat, final double minLon,
final double minLat, final double maxLon, final double maxLat) {
return (compare(lon, minLon) >= 0 && compare(lon, maxLon) <= 0
&& compare(lat, minLat) >= 0 && compare(lat, maxLat) <= 0);
}
/**
* simple even-odd point in polygon computation
* 1. Determine if point is contained in the longitudinal range
* 2. Determine whether point crosses the edge by computing the latitudinal delta
* between the end-point of a parallel vector (originating at the point) and the
* y-component of the edge sink
*
* NOTE: Requires polygon point (x,y) order either clockwise or counter-clockwise
*/
public static boolean pointInPolygon(double[] x, double[] y, double lat, double lon) {
assert x.length == y.length;
boolean inPoly = false;
/**
* Note: This is using a euclidean coordinate system which could result in
* upwards of 110KM error at the equator.
* TODO convert coordinates to cylindrical projection (e.g. mercator)
*/
for (int i = 1; i < x.length; i++) {
if (x[i] < lon && x[i-1] >= lon || x[i-1] < lon && x[i] >= lon) {
if (y[i] + (lon - x[i]) / (x[i-1] - x[i]) * (y[i-1] - y[i]) < lat) {
inPoly = !inPoly;
}
}
}
return inPoly;
}
public static String geoTermToString(long term) {
StringBuilder s = new StringBuilder(64);
final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
for (int i = 0; i < numberOfLeadingZeros; i++) {
s.append('0');
}
if (term != 0) {
s.append(Long.toBinaryString(term));
}
return s.toString();
}
public static boolean rectDisjoint(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
}
/**
* Computes whether a rectangle is wholly within another rectangle (shared boundaries allowed)
*/
public static boolean rectWithin(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY);
}
public static boolean rectCrosses(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(rectDisjoint(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY) ||
rectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY));
}
/**
* Computes whether rectangle a contains rectangle b (touching allowed)
*/
public static boolean rectContains(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !(bMinX < aMinX || bMinY < aMinY || bMaxX > aMaxX || bMaxY > aMaxY);
}
/**
* Computes whether a rectangle intersects another rectangle (crosses, within, touching, etc)
*/
public static boolean rectIntersects(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
return !((aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) );
}
/**
* Computes whether a rectangle crosses a shape. (touching not allowed)
*/
public static boolean rectCrossesPoly(final double rMinX, final double rMinY, final double rMaxX,
final double rMaxY, final double[] shapeX, final double[] shapeY,
final double sMinX, final double sMinY, final double sMaxX,
final double sMaxY) {
// short-circuit: if the bounding boxes are disjoint then the shape does not cross
if (rectDisjoint(rMinX, rMinY, rMaxX, rMaxY, sMinX, sMinY, sMaxX, sMaxY)) {
return false;
}
final double[][] bbox = new double[][] { {rMinX, rMinY}, {rMaxX, rMinY}, {rMaxX, rMaxY}, {rMinX, rMaxY}, {rMinX, rMinY} };
final int polyLength = shapeX.length-1;
double d, s, t, a1, b1, c1, a2, b2, c2;
double x00, y00, x01, y01, x10, y10, x11, y11;
// computes the intersection point between each bbox edge and the polygon edge
for (short b=0; b<4; ++b) {
a1 = bbox[b+1][1]-bbox[b][1];
b1 = bbox[b][0]-bbox[b+1][0];
c1 = a1*bbox[b+1][0] + b1*bbox[b+1][1];
for (int p=0; p<polyLength; ++p) {
a2 = shapeY[p+1]-shapeY[p];
b2 = shapeX[p]-shapeX[p+1];
// compute determinant
d = a1*b2 - a2*b1;
if (d != 0) {
// lines are not parallel, check intersecting points
c2 = a2*shapeX[p+1] + b2*shapeY[p+1];
s = (1/d)*(b2*c1 - b1*c2);
t = (1/d)*(a1*c2 - a2*c1);
x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - TOLERANCE;
x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + TOLERANCE;
y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - TOLERANCE;
y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + TOLERANCE;
x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - TOLERANCE;
x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + TOLERANCE;
y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - TOLERANCE;
y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + TOLERANCE;
// check whether the intersection point is touching one of the line segments
boolean touching = ((x00 == s && y00 == t) || (x01 == s && y01 == t))
|| ((x10 == s && y10 == t) || (x11 == s && y11 == t));
// if line segments are not touching and the intersection point is within the range of either segment
if (!(touching || x00 > s || x01 < s || y00 > t || y01 < t || x10 > s || x11 < s || y10 > t || y11 < t)) {
return true;
}
}
} // for each poly edge
} // for each bbox edge
return false;
}
/**
* Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
*
* @param lon longitudinal center of circle (in degrees)
* @param lat latitudinal center of circle (in degrees)
* @param radius distance radius of circle (in meters)
* @return a list of lon/lat points representing the circle
*/
@SuppressWarnings({"unchecked","rawtypes"})
public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radius) {
double angle;
// a little under-sampling (to limit the number of polygonal points): using Archimedes' estimation of pi
final int sides = 25;
ArrayList<double[]> geometry = new ArrayList();
double[] lons = new double[sides];
double[] lats = new double[sides];
double[] pt = new double[2];
final int sidesLen = sides-1;
for (int i=0; i<sidesLen; ++i) {
angle = (i*360/sides);
pt = XGeoProjectionUtils.pointFromLonLatBearing(lon, lat, angle, radius, pt);
lons[i] = pt[0];
lats[i] = pt[1];
}
// close the poly
lons[sidesLen] = lons[0];
lats[sidesLen] = lats[0];
geometry.add(lons);
geometry.add(lats);
return geometry;
}
/**
* Computes whether a rectangle is within a given polygon (shared boundaries allowed)
*/
public static boolean rectWithinPoly(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double[] shapeX, final double[] shapeY, final double sMinX,
final double sMinY, final double sMaxX, final double sMaxY) {
// check if rectangle crosses poly (to handle concave/pacman polys), then check that all 4 corners
// are contained
return !(rectCrossesPoly(rMinX, rMinY, rMaxX, rMaxY, shapeX, shapeY, sMinX, sMinY, sMaxX, sMaxY) ||
!pointInPolygon(shapeX, shapeY, rMinY, rMinX) || !pointInPolygon(shapeX, shapeY, rMinY, rMaxX) ||
!pointInPolygon(shapeX, shapeY, rMaxY, rMaxX) || !pointInPolygon(shapeX, shapeY, rMaxY, rMinX));
}
private static boolean rectAnyCornersOutsideCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 > radius
|| SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 > radius);
}
private static boolean rectAnyCornersInCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 <= radius
|| SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 <= radius);
}
public static boolean rectWithinCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return !(rectAnyCornersOutsideCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius));
}
/**
* Computes whether a rectangle crosses a circle
*/
public static boolean rectCrossesCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
final double centerLon, final double centerLat, final double radius) {
return rectAnyCornersInCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius)
|| lineCrossesSphere(rMinX, rMinY, 0, rMaxX, rMinY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMaxX, rMinY, 0, rMaxX, rMaxY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMaxX, rMaxY, 0, rMinX, rMaxY, 0, centerLon, centerLat, 0, radius)
|| lineCrossesSphere(rMinX, rMaxY, 0, rMinX, rMinY, 0, centerLon, centerLat, 0, radius);
}
/**
* Computes whether a 3-dimensional line segment intersects or crosses a sphere
*
* @param lon1 longitudinal location of the line segment start point (in degrees)
* @param lat1 latitudinal location of the line segment start point (in degrees)
* @param alt1 altitude of the line segment start point (in meters)
* @param lon2 longitudinal location of the line segment end point (in degrees)
* @param lat2 latitudinal location of the line segment end point (in degrees)
* @param alt2 altitude of the line segment end point (in meters)
* @param centerLon longitudinal location of center search point (in degrees)
* @param centerLat latitudinal location of center search point (in degrees)
* @param centerAlt altitude of the center point (in meters)
* @param radius search sphere radius (in meters)
* @return whether the provided line segment is a secant of the sphere
*/
private static boolean lineCrossesSphere(double lon1, double lat1, double alt1, double lon2,
double lat2, double alt2, double centerLon, double centerLat,
double centerAlt, double radius) {
// convert to cartesian 3d (in meters)
double[] ecf1 = XGeoProjectionUtils.llaToECF(lon1, lat1, alt1, null);
double[] ecf2 = XGeoProjectionUtils.llaToECF(lon2, lat2, alt2, null);
double[] cntr = XGeoProjectionUtils.llaToECF(centerLon, centerLat, centerAlt, null);
final double dX = ecf2[0] - ecf1[0];
final double dY = ecf2[1] - ecf1[1];
final double dZ = ecf2[2] - ecf1[2];
final double fX = ecf1[0] - cntr[0];
final double fY = ecf1[1] - cntr[1];
final double fZ = ecf1[2] - cntr[2];
final double a = dX*dX + dY*dY + dZ*dZ;
final double b = 2 * (fX*dX + fY*dY + fZ*dZ);
final double c = (fX*fX + fY*fY + fZ*fZ) - (radius*radius);
double discrim = (b*b)-(4*a*c);
if (discrim < 0) {
return false;
}
discrim = StrictMath.sqrt(discrim);
final double a2 = 2*a;
final double t1 = (-b - discrim)/a2;
final double t2 = (-b + discrim)/a2;
if ( (t1 < 0 || t1 > 1) ) {
return !(t2 < 0 || t2 > 1);
}
return true;
}
public static boolean isValidLat(double lat) {
return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
}
public static boolean isValidLon(double lon) {
return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
}
}
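
For reference, a hypothetical sketch of the morton (z-order) encoding above: lon and lat are scaled to 31-bit integers and bit-interleaved, so the hash inverts cleanly up to the quantization resolution (roughly 1e-7 degrees):

public class XGeoUtilsExample {
    public static void main(String[] args) {
        double lon = -74.0060, lat = 40.7128;
        long hash = XGeoUtils.mortonHash(lon, lat);
        // unhashing recovers the inputs up to the 31-bit scaling error
        assert Math.abs(XGeoUtils.mortonUnhashLon(hash) - lon) < 1e-6;
        assert Math.abs(XGeoUtils.mortonUnhashLat(hash) - lat) < 1e-6;
    }
}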

View File

@@ -264,7 +264,9 @@ public class Version {
 public static final int V_2_0_0_rc1_ID = 2000051;
 public static final Version V_2_0_0_rc1 = new Version(V_2_0_0_rc1_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
 public static final int V_2_0_0_ID = 2000099;
-public static final Version V_2_0_0 = new Version(V_2_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
+public static final Version V_2_0_0 = new Version(V_2_0_0_ID, false, org.apache.lucene.util.Version.LUCENE_5_2_1);
+public static final int V_2_0_1_ID = 2000199;
+public static final Version V_2_0_1 = new Version(V_2_0_1_ID, true, org.apache.lucene.util.Version.LUCENE_5_2_1);
 public static final int V_2_1_0_ID = 2010099;
 public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
 public static final int V_2_2_0_ID = 2020099;
@@ -289,6 +291,8 @@ public class Version {
 return V_2_2_0;
 case V_2_1_0_ID:
 return V_2_1_0;
+case V_2_0_1_ID:
+return V_2_0_1;
 case V_2_0_0_ID:
 return V_2_0_0;
 case V_2_0_0_rc1_ID:
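
The new V_2_0_1_ID fits the positional ID scheme implied by the surrounding constants (major, minor, revision, then a build byte where 99 appears to mean a release and lower values a pre-release, e.g. rc1 = 51). A sketch of that reading, inferred from the constants above rather than from this commit itself:

int major = 2, minor = 0, revision = 1, build = 99;
int id = major * 1000000 + minor * 10000 + revision * 100 + build; // 2000199 == V_2_0_1_ID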

View File

@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.discovery.DiscoveryStats;
 import org.elasticsearch.http.HttpStats;
 import org.elasticsearch.indices.NodeIndicesStats;
 import org.elasticsearch.indices.breaker.AllCircuitBreakerStats;

@@ -78,6 +79,9 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 @Nullable
 private ScriptStats scriptStats;
+@Nullable
+private DiscoveryStats discoveryStats;
 NodeStats() {
 }

@@ -85,7 +89,8 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 @Nullable OsStats os, @Nullable ProcessStats process, @Nullable JvmStats jvm, @Nullable ThreadPoolStats threadPool,
 @Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http,
 @Nullable AllCircuitBreakerStats breaker,
-@Nullable ScriptStats scriptStats) {
+@Nullable ScriptStats scriptStats,
+@Nullable DiscoveryStats discoveryStats) {
 super(node);
 this.timestamp = timestamp;
 this.indices = indices;

@@ -98,6 +103,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 this.http = http;
 this.breaker = breaker;
 this.scriptStats = scriptStats;
+this.discoveryStats = discoveryStats;
 }
 public long getTimestamp() {

@@ -177,6 +183,11 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 return this.scriptStats;
 }
+@Nullable
+public DiscoveryStats getDiscoveryStats() {
+return this.discoveryStats;
+}
 public static NodeStats readNodeStats(StreamInput in) throws IOException {
 NodeStats nodeInfo = new NodeStats();
 nodeInfo.readFrom(in);

@@ -213,6 +224,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 }
 breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in);
 scriptStats = in.readOptionalStreamable(new ScriptStats());
+discoveryStats = in.readOptionalStreamable(new DiscoveryStats(null));
 }

@@ -270,6 +282,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 }
 out.writeOptionalStreamable(breaker);
 out.writeOptionalStreamable(scriptStats);
+out.writeOptionalStreamable(discoveryStats);
 }

@@ -321,6 +334,10 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
 getScriptStats().toXContent(builder, params);
 }
+if (getDiscoveryStats() != null) {
+getDiscoveryStats().toXContent(builder, params);
+}
 return builder;
 }
 }
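
The new field follows the same optional-streamable convention as scriptStats: to my reading, writeOptionalStreamable writes a presence flag followed by the payload, and readOptionalStreamable returns null when the flag is false, so reads must mirror writes in order. A hand-rolled sketch of what the write side expands to, under that assumption:

// assumed expansion of out.writeOptionalStreamable(discoveryStats)
if (discoveryStats == null) {
    out.writeBoolean(false);
} else {
    out.writeBoolean(true);
    discoveryStats.writeTo(out);
}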

View File

@@ -41,6 +41,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 private boolean http;
 private boolean breaker;
 private boolean script;
+private boolean discovery;
 public NodesStatsRequest() {
 }

@@ -67,6 +68,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 this.http = true;
 this.breaker = true;
 this.script = true;
+this.discovery = true;
 return this;
 }

@@ -84,6 +86,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 this.http = false;
 this.breaker = false;
 this.script = false;
+this.discovery = false;
 return this;
 }

@@ -234,6 +237,20 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 return this;
 }
+public boolean discovery() {
+return this.discovery;
+}
+
+/**
+ * Should the node's discovery stats be returned.
+ */
+public NodesStatsRequest discovery(boolean discovery) {
+this.discovery = discovery;
+return this;
+}
 @Override
 public void readFrom(StreamInput in) throws IOException {
 super.readFrom(in);

@@ -247,6 +264,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 http = in.readBoolean();
 breaker = in.readBoolean();
 script = in.readBoolean();
+discovery = in.readBoolean();
 }

@@ -262,6 +280,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
 out.writeBoolean(http);
 out.writeBoolean(breaker);
 out.writeBoolean(script);
+out.writeBoolean(discovery);
 }
 }

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.action.admin.cluster.node.stats;
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
 import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;

@@ -130,4 +129,12 @@ public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder<Nodes
 request.http(http);
 return this;
 }
+/**
+ * Should the discovery stats be returned.
+ */
+public NodesStatsRequestBuilder setDiscovery(boolean discovery) {
+request.discovery(discovery);
+return this;
+}
 }
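
A hypothetical client-side use of the new flag (builder calls other than setDiscovery are assumed from the existing nodes-stats API):

NodesStatsResponse resp = client.admin().cluster().prepareNodesStats()
        .clear()              // start from no stats sections
        .setDiscovery(true)   // then request only discovery stats
        .get();
for (NodeStats stats : resp.getNodes()) {
    if (stats.getDiscoveryStats() != null) {
        // inspect the per-node discovery stats
    }
}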

View File

@@ -80,7 +80,7 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe
 protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) {
 NodesStatsRequest request = nodeStatsRequest.request;
 return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(),
-request.fs(), request.transport(), request.http(), request.breaker(), request.script());
+request.fs(), request.transport(), request.http(), request.breaker(), request.script(), request.discovery());
 }

 @Override
View File

@@ -101,7 +101,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
 @Override
 protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) {
 NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, true);
-NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, true, false, false, false, false);
+NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, true, false, false, false, false, false);
 List<ShardStats> shardsStats = new ArrayList<>();
 for (IndexService indexService : indicesService) {
 for (IndexShard indexShard : indexService) {

View File

@@ -31,7 +31,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.CountDown;
 import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;

@@ -77,42 +76,22 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction<Delete
 @Override
 protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, final ActionListener<DeleteIndexResponse> listener) {
-String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
 if (concreteIndices.length == 0) {
 listener.onResponse(new DeleteIndexResponse(true));
 return;
 }
-// TODO: this API should be improved, currently, if one delete index failed, we send a failure, we should send a response array that includes all the indices that were deleted
-final CountDown count = new CountDown(concreteIndices.length);
-for (final String index : concreteIndices) {
-deleteIndexService.deleteIndex(new MetaDataDeleteIndexService.Request(index).timeout(request.timeout()).masterTimeout(request.masterNodeTimeout()), new MetaDataDeleteIndexService.Listener() {
-private volatile Throwable lastFailure;
-private volatile boolean ack = true;
+deleteIndexService.deleteIndices(new MetaDataDeleteIndexService.Request(concreteIndices).timeout(request.timeout()).masterTimeout(request.masterNodeTimeout()), new MetaDataDeleteIndexService.Listener() {
 @Override
 public void onResponse(MetaDataDeleteIndexService.Response response) {
-if (!response.acknowledged()) {
-ack = false;
-}
-if (count.countDown()) {
-if (lastFailure != null) {
-listener.onFailure(lastFailure);
-} else {
-listener.onResponse(new DeleteIndexResponse(ack));
-}
-}
+listener.onResponse(new DeleteIndexResponse(response.acknowledged()));
 }
 @Override
 public void onFailure(Throwable t) {
-logger.debug("[{}] failed to delete index", t, index);
-lastFailure = t;
-if (count.countDown()) {
 listener.onFailure(t);
-}
 }
 });
-}
 }
 }
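
The removed aggregation leaned on CountDown#countDown(), which, to my reading, returns true exactly once: for the call that brings the count to zero. That is what made the "last listener in sends the final response" pattern safe; the refactor makes it unnecessary by issuing a single request for all concrete indices. A sketch of the removed pattern's core, under that assumed semantics:

CountDown count = new CountDown(3);
count.countDown(); // false
count.countDown(); // false
count.countDown(); // true -> only this caller completes the listener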

View File

@@ -179,7 +179,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
 SearchContext.setCurrent(searchContext);
 try {
 if (request.source() != null && request.source().length() > 0) {
-searchContext.parsedQuery(queryParserService.parseQuery(request.source()));
+searchContext.parsedQuery(queryParserService.parseTopLevelQuery(request.source()));
 }
 searchContext.preProcess();

View File

@ -21,13 +21,11 @@ package org.elasticsearch.action.explain;
import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.action.support.single.shard.SingleShardRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.fetch.source.FetchSourceContext;
import java.io.IOException; import java.io.IOException;
@ -41,7 +39,7 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
private String id; private String id;
private String routing; private String routing;
private String preference; private String preference;
private BytesReference source; private QueryBuilder<?> query;
private String[] fields; private String[] fields;
private FetchSourceContext fetchSourceContext; private FetchSourceContext fetchSourceContext;
@ -102,17 +100,12 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
return this; return this;
} }
public BytesReference source() { public QueryBuilder<?> query() {
return source; return query;
} }
public ExplainRequest source(QuerySourceBuilder sourceBuilder) { public ExplainRequest query(QueryBuilder<?> query) {
this.source = sourceBuilder.buildAsBytes(Requests.CONTENT_TYPE); this.query = query;
return this;
}
public ExplainRequest source(BytesReference source) {
this.source = source;
return this; return this;
} }
@ -159,8 +152,8 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
if (id == null) { if (id == null) {
validationException = ValidateActions.addValidationError("id is missing", validationException); validationException = ValidateActions.addValidationError("id is missing", validationException);
} }
if (source == null) { if (query == null) {
validationException = ValidateActions.addValidationError("source is missing", validationException); validationException = ValidateActions.addValidationError("query is missing", validationException);
} }
return validationException; return validationException;
} }
@ -172,7 +165,7 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
id = in.readString(); id = in.readString();
routing = in.readOptionalString(); routing = in.readOptionalString();
preference = in.readOptionalString(); preference = in.readOptionalString();
source = in.readBytesReference(); query = in.readQuery();
filteringAlias = in.readStringArray(); filteringAlias = in.readStringArray();
if (in.readBoolean()) { if (in.readBoolean()) {
fields = in.readStringArray(); fields = in.readStringArray();
@ -189,7 +182,7 @@ public class ExplainRequest extends SingleShardRequest<ExplainRequest> {
out.writeString(id); out.writeString(id);
out.writeOptionalString(routing); out.writeOptionalString(routing);
out.writeOptionalString(preference); out.writeOptionalString(preference);
out.writeBytesReference(source); out.writeQuery(query);
out.writeStringArray(filteringAlias); out.writeStringArray(filteringAlias);
if (fields != null) { if (fields != null) {
out.writeBoolean(true); out.writeBoolean(true);
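With the raw source bytes replaced by a typed QueryBuilder (serialized via writeQuery/readQuery), the request API now looks roughly like this; a minimal sketch, where the index/type/id and term values are hypothetical:
------------------------------
ExplainRequest request = new ExplainRequest("index", "type", "1");
request.query(QueryBuilders.termQuery("user", "kimchy"));
// validate() now reports "query is missing" instead of "source is missing"
assert request.validate() == null;
------------------------------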


@ -19,12 +19,10 @@
package org.elasticsearch.action.explain; package org.elasticsearch.action.explain;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.support.single.shard.SingleShardOperationRequestBuilder; import org.elasticsearch.action.support.single.shard.SingleShardOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.fetch.source.FetchSourceContext;
@ -33,8 +31,6 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext;
*/ */
public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<ExplainRequest, ExplainResponse, ExplainRequestBuilder> { public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<ExplainRequest, ExplainResponse, ExplainRequestBuilder> {
private QuerySourceBuilder sourceBuilder;
ExplainRequestBuilder(ElasticsearchClient client, ExplainAction action) { ExplainRequestBuilder(ElasticsearchClient client, ExplainAction action) {
super(client, action, new ExplainRequest()); super(client, action, new ExplainRequest());
} }
@ -87,15 +83,7 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<Ex
* Sets the query to get a score explanation for. * Sets the query to get a score explanation for.
*/ */
public ExplainRequestBuilder setQuery(QueryBuilder query) { public ExplainRequestBuilder setQuery(QueryBuilder query) {
sourceBuilder().setQuery(query); request.query(query);
return this;
}
/**
* Sets the query to get a score explanation for.
*/
public ExplainRequestBuilder setQuery(BytesReference query) {
sourceBuilder().setQuery(query);
return this; return this;
} }
@ -151,28 +139,4 @@ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder<Ex
} }
return this; return this;
} }
/**
* Sets the full source of the explain request (for example, wrapping an actual query).
*/
public ExplainRequestBuilder setSource(BytesReference source) {
request().source(source);
return this;
}
@Override
protected ExplainRequest beforeExecute(ExplainRequest request) {
if (sourceBuilder != null) {
request.source(sourceBuilder);
}
return request;
}
private QuerySourceBuilder sourceBuilder() {
if (sourceBuilder == null) {
sourceBuilder = new QuerySourceBuilder();
}
return sourceBuilder;
}
} }
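Since the builder no longer buffers a QuerySourceBuilder, setQuery writes straight through to the request; a hedged usage sketch (client handle, index, and query values are illustrative):
------------------------------
ExplainResponse response = client.prepareExplain("index", "type", "1")
        .setQuery(QueryBuilders.matchQuery("title", "elasticsearch"))
        .get();
------------------------------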


@ -121,7 +121,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
SearchContext.setCurrent(context); SearchContext.setCurrent(context);
try { try {
context.parsedQuery(indexService.queryParserService().parseQuery(request.source())); context.parsedQuery(indexService.queryParserService().toQuery(request.query()));
context.preProcess(); context.preProcess();
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase; int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().context.docBase;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);


@ -36,6 +36,7 @@ import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.NoOpShardStateActionListener;
import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -686,7 +687,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
/** /**
* inner class responsible for sending the requests to all replica shards and managing the responses * inner class responsible for sending the requests to all replica shards and managing the responses
*/ */
final class ReplicationPhase extends AbstractRunnable { final class ReplicationPhase extends AbstractRunnable implements ShardStateAction.Listener {
private final ReplicaRequest replicaRequest; private final ReplicaRequest replicaRequest;
private final Response finalResponse; private final Response finalResponse;
@ -821,6 +822,16 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
forceFinishAsFailed(t); forceFinishAsFailed(t);
} }
@Override
public void onShardFailedNoMaster() {
}
@Override
public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
}
/** /**
* start sending current requests to replicas * start sending current requests to replicas
*/ */
@ -886,7 +897,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
logger.trace("[{}] transport failure during replica request [{}] ", exp, node, replicaRequest); logger.trace("[{}] transport failure during replica request [{}] ", exp, node, replicaRequest);
if (ignoreReplicaException(exp) == false) { if (ignoreReplicaException(exp) == false) {
logger.warn("{} failed to perform {} on node {}", exp, shardIt.shardId(), actionName, node); logger.warn("{} failed to perform {} on node {}", exp, shardIt.shardId(), actionName, node);
shardStateAction.shardFailed(shard, indexMetaData.getIndexUUID(), "failed to perform " + actionName + " on replica on node " + node, exp); shardStateAction.shardFailed(shard, indexMetaData.getIndexUUID(), "failed to perform " + actionName + " on replica on node " + node, exp, ReplicationPhase.this);
} }
} }


@ -21,14 +21,12 @@ package org.elasticsearch.bootstrap;
import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.SuppressForbidden;
import java.net.URI;
import java.net.URL; import java.net.URL;
import java.security.CodeSource; import java.security.CodeSource;
import java.security.Permission; import java.security.Permission;
import java.security.PermissionCollection; import java.security.PermissionCollection;
import java.security.Policy; import java.security.Policy;
import java.security.ProtectionDomain; import java.security.ProtectionDomain;
import java.security.URIParameter;
import java.util.Map; import java.util.Map;
/** custom policy for union of static and dynamic permissions */ /** custom policy for union of static and dynamic permissions */
@ -42,13 +40,11 @@ final class ESPolicy extends Policy {
final Policy template; final Policy template;
final Policy untrusted; final Policy untrusted;
final PermissionCollection dynamic; final PermissionCollection dynamic;
final Map<String,PermissionCollection> plugins; final Map<String,Policy> plugins;
public ESPolicy(PermissionCollection dynamic, Map<String,PermissionCollection> plugins) throws Exception { public ESPolicy(PermissionCollection dynamic, Map<String,Policy> plugins) {
URI policyUri = getClass().getResource(POLICY_RESOURCE).toURI(); this.template = Security.readPolicy(getClass().getResource(POLICY_RESOURCE), JarHell.parseClassPath());
URI untrustedUri = getClass().getResource(UNTRUSTED_RESOURCE).toURI(); this.untrusted = Security.readPolicy(getClass().getResource(UNTRUSTED_RESOURCE), new URL[0]);
this.template = Policy.getInstance("JavaPolicy", new URIParameter(policyUri));
this.untrusted = Policy.getInstance("JavaPolicy", new URIParameter(untrustedUri));
this.dynamic = dynamic; this.dynamic = dynamic;
this.plugins = plugins; this.plugins = plugins;
} }
@ -69,9 +65,10 @@ final class ESPolicy extends Policy {
if (BootstrapInfo.UNTRUSTED_CODEBASE.equals(location.getFile())) { if (BootstrapInfo.UNTRUSTED_CODEBASE.equals(location.getFile())) {
return untrusted.implies(domain, permission); return untrusted.implies(domain, permission);
} }
// check for an additional plugin permission // check for an additional plugin permission: plugin policy is
PermissionCollection plugin = plugins.get(location.getFile()); // only consulted for its codesources.
if (plugin != null && plugin.implies(permission)) { Policy plugin = plugins.get(location.getFile());
if (plugin != null && plugin.implies(domain, permission)) {
return true; return true;
} }
} }
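A minimal sketch of the lookup the reworked ESPolicy performs: plugin grants are kept as whole Policy objects keyed by codesource location, and consulted per protection domain (the map contents below are hypothetical):
------------------------------
Map<String, Policy> plugins = new HashMap<>();
plugins.put("/path/to/plugins/example/example-1.0.jar", pluginPolicy);

// at check time, the requesting domain's codesource selects the plugin policy
Policy plugin = plugins.get(domain.getCodeSource().getLocation().getFile());
boolean granted = plugin != null && plugin.implies(domain, permission);
------------------------------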


@ -20,10 +20,12 @@
package org.elasticsearch.bootstrap; package org.elasticsearch.bootstrap;
import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.PluginInfo; import org.elasticsearch.plugins.PluginInfo;
import java.io.*; import java.io.*;
import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
import java.nio.file.AccessMode; import java.nio.file.AccessMode;
import java.nio.file.DirectoryStream; import java.nio.file.DirectoryStream;
@ -32,15 +34,14 @@ import java.nio.file.Files;
import java.nio.file.NotDirectoryException; import java.nio.file.NotDirectoryException;
import java.nio.file.Path; import java.nio.file.Path;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import java.security.PermissionCollection;
import java.security.Permissions; import java.security.Permissions;
import java.security.Policy; import java.security.Policy;
import java.security.URIParameter; import java.security.URIParameter;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.IdentityHashMap; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.regex.Pattern;
/** /**
* Initializes SecurityManager with necessary permissions. * Initializes SecurityManager with necessary permissions.
@ -87,6 +88,11 @@ import java.util.regex.Pattern;
* <pre> * <pre>
* JAVA_OPTS="-Djava.security.debug=access,failure" bin/elasticsearch * JAVA_OPTS="-Djava.security.debug=access,failure" bin/elasticsearch
* </pre> * </pre>
* <p>
* When running tests you have to pass it to the test runner like this:
* <pre>
* mvn test -Dtests.jvm.argline="-Djava.security.debug=access,failure" ...
* </pre>
* See <a href="https://docs.oracle.com/javase/7/docs/technotes/guides/security/troubleshooting-security.html"> * See <a href="https://docs.oracle.com/javase/7/docs/technotes/guides/security/troubleshooting-security.html">
* Troubleshooting Security</a> for information. * Troubleshooting Security</a> for information.
*/ */
@ -99,8 +105,6 @@ final class Security {
* Can only happen once! * Can only happen once!
*/ */
static void configure(Environment environment) throws Exception { static void configure(Environment environment) throws Exception {
// set properties for jar locations
setCodebaseProperties();
// enable security policy: union of template and environment-based paths, and possibly plugin permissions // enable security policy: union of template and environment-based paths, and possibly plugin permissions
Policy.setPolicy(new ESPolicy(createPermissions(environment), getPluginPermissions(environment))); Policy.setPolicy(new ESPolicy(createPermissions(environment), getPluginPermissions(environment)));
@ -121,70 +125,35 @@ final class Security {
selfTest(); selfTest();
} }
// mapping of jars to codebase properties
// note that this is only read once, when policy is parsed.
private static final Map<Pattern,String> SPECIAL_JARS;
static {
Map<Pattern,String> m = new IdentityHashMap<>();
m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
m.put(Pattern.compile(".*lucene-test-framework-.*\\.jar$"), "es.security.jar.lucene.testframework");
m.put(Pattern.compile(".*randomizedtesting-runner-.*\\.jar$"), "es.security.jar.randomizedtesting.runner");
m.put(Pattern.compile(".*junit4-ant-.*\\.jar$"), "es.security.jar.randomizedtesting.junit4");
m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
SPECIAL_JARS = Collections.unmodifiableMap(m);
}
/**
* Sets properties (codebase URLs) for policy files.
* JAR locations are not fixed so we have to find the locations of
* the ones we want.
*/
@SuppressForbidden(reason = "proper use of URL")
static void setCodebaseProperties() {
for (URL url : JarHell.parseClassPath()) {
for (Map.Entry<Pattern,String> e : SPECIAL_JARS.entrySet()) {
if (e.getKey().matcher(url.getPath()).matches()) {
String prop = e.getValue();
if (System.getProperty(prop) != null) {
throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
}
System.setProperty(prop, url.toString());
}
}
}
for (String prop : SPECIAL_JARS.values()) {
if (System.getProperty(prop) == null) {
System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all"
}
}
}
/** /**
* Sets properties (codebase URLs) for policy files. * Collects policies for all installed plugins, keyed by
* we look for matching plugins and set URLs to fit * the codebase location of each plugin jar.
*/ */
@SuppressForbidden(reason = "proper use of URL") @SuppressForbidden(reason = "proper use of URL")
static Map<String,PermissionCollection> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException { static Map<String,Policy> getPluginPermissions(Environment environment) throws IOException, NoSuchAlgorithmException {
Map<String,PermissionCollection> map = new HashMap<>(); Map<String,Policy> map = new HashMap<>();
if (Files.exists(environment.pluginsFile())) { if (Files.exists(environment.pluginsFile())) {
try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) { try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
for (Path plugin : stream) { for (Path plugin : stream) {
Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY); Path policyFile = plugin.resolve(PluginInfo.ES_PLUGIN_POLICY);
if (Files.exists(policyFile)) { if (Files.exists(policyFile)) {
// parse the plugin's policy file into a set of permissions // first get a list of URLs for the plugins' jars:
Policy policy = Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toUri())); // we resolve symlinks so map is keyed on the normalize codebase name
PermissionCollection permissions = policy.getPermissions(Security.class.getProtectionDomain()); List<URL> codebases = new ArrayList<>();
// this method is supported with the specific implementation we use, but just check for safety.
if (permissions == Policy.UNSUPPORTED_EMPTY_COLLECTION) {
throw new UnsupportedOperationException("JavaPolicy implementation does not support retrieving permissions");
}
// grant the permissions to each jar in the plugin
try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) { try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
for (Path jar : jarStream) { for (Path jar : jarStream) {
if (map.put(jar.toUri().toURL().getFile(), permissions) != null) { codebases.add(jar.toRealPath().toUri().toURL());
// just be paranoid ok? }
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + jar); }
}
// parse the plugin's policy file into a set of permissions
Policy policy = readPolicy(policyFile.toUri().toURL(), codebases.toArray(new URL[codebases.size()]));
// consult this policy for each of the plugin's jars:
for (URL url : codebases) {
if (map.put(url.getFile(), policy) != null) {
// just be paranoid ok?
throw new IllegalStateException("per-plugin permissions already granted for jar file: " + url);
} }
} }
} }
@ -194,6 +163,35 @@ final class Security {
return Collections.unmodifiableMap(map); return Collections.unmodifiableMap(map);
} }
/**
* Reads and returns the specified {@code policyFile}.
* <p>
* Resources (e.g. jar files and directories) listed in {@code codebases} will be
* provided to the policy file via a system property of the short name:
* e.g. <code>${codebase.joda-convert-1.2.jar}</code> would map to the full URL.
*/
@SuppressForbidden(reason = "accesses fully qualified URLs to configure security")
static Policy readPolicy(URL policyFile, URL codebases[]) {
try {
try {
// set codebase properties
for (URL url : codebases) {
String shortName = PathUtils.get(url.toURI()).getFileName().toString();
System.setProperty("codebase." + shortName, url.toString());
}
return Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toURI()));
} finally {
// clear codebase properties
for (URL url : codebases) {
String shortName = PathUtils.get(url.toURI()).getFileName().toString();
System.clearProperty("codebase." + shortName);
}
}
} catch (NoSuchAlgorithmException | URISyntaxException e) {
throw new IllegalArgumentException("unable to parse policy file `" + policyFile + "`", e);
}
}
/** returns dynamic Permissions to configured paths */ /** returns dynamic Permissions to configured paths */
static Permissions createPermissions(Environment environment) throws IOException { static Permissions createPermissions(Environment environment) throws IOException {
Permissions policy = new Permissions(); Permissions policy = new Permissions();
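Given readPolicy's ${codebase.<jar-name>} substitution, a plugin policy can scope grants to individual jars. A sketch under assumed paths; the grant shown in the comment is hypothetical:
------------------------------
// plugin-security.policy might contain:
//   grant codeBase "${codebase.example-1.0.jar}" {
//     permission java.lang.RuntimePermission "getClassLoader";
//   };
URL policyUrl = PathUtils.get("plugins/example/plugin-security.policy").toUri().toURL();
URL[] codebases = { PathUtils.get("plugins/example/example-1.0.jar").toUri().toURL() };
Policy policy = Security.readPolicy(policyUrl, codebases);
------------------------------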


@ -143,7 +143,7 @@ public class TransportClient extends AbstractClient {
modules.add(new ClusterNameModule(this.settings)); modules.add(new ClusterNameModule(this.settings));
modules.add(new ThreadPoolModule(threadPool)); modules.add(new ThreadPoolModule(threadPool));
modules.add(new TransportModule(this.settings)); modules.add(new TransportModule(this.settings));
modules.add(new SearchModule(this.settings) { modules.add(new SearchModule() {
@Override @Override
protected void configure() { protected void configure() {
// noop // noop


@ -17,30 +17,7 @@
* under the License. * under the License.
*/ */
package org.elasticsearch.index.query; package org.elasticsearch.cluster.action.shard;
import java.io.IOException; public class NoOpShardStateActionListener implements ShardStateAction.Listener {
/**
* Parser for query filter
* @deprecated use any query instead directly, possible since queries and filters are merged.
*/
// TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed
@Deprecated
public class QueryFilterParser implements QueryParser<QueryFilterBuilder> {
@Override
public String[] names() {
return new String[]{QueryFilterBuilder.NAME};
}
@Override
public QueryFilterBuilder fromXContent(QueryParseContext parseContext) throws IOException {
return new QueryFilterBuilder(parseContext.parseInnerQueryBuilder());
}
@Override
public QueryFilterBuilder getBuilderPrototype() {
return QueryFilterBuilder.PROTOTYPE;
}
} }


@ -77,27 +77,29 @@ public class ShardStateAction extends AbstractComponent {
transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler()); transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler());
} }
public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure) { public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) {
DiscoveryNode masterNode = clusterService.state().nodes().masterNode(); DiscoveryNode masterNode = clusterService.state().nodes().masterNode();
if (masterNode == null) { if (masterNode == null) {
logger.warn("can't send shard failed for {}, no master known.", shardRouting); logger.warn("can't send shard failed for {}, no master known.", shardRouting);
listener.onShardFailedNoMaster();
return; return;
} }
innerShardFailed(shardRouting, indexUUID, masterNode, message, failure); innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, listener);
} }
public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, @Nullable final Throwable failure) { public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, @Nullable final Throwable failure, Listener listener) {
logger.trace("{} re-sending failed shard for {}, indexUUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message); logger.trace("{} re-sending failed shard for {}, indexUUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message);
innerShardFailed(shardRouting, indexUUID, masterNode, message, failure); innerShardFailed(shardRouting, indexUUID, masterNode, message, failure, listener);
} }
private void innerShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, final Throwable failure) { private void innerShardFailed(final ShardRouting shardRouting, final String indexUUID, final DiscoveryNode masterNode, final String message, final Throwable failure, Listener listener) {
ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure);
transportService.sendRequest(masterNode, transportService.sendRequest(masterNode,
SHARD_FAILED_ACTION_NAME, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { SHARD_FAILED_ACTION_NAME, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
@Override @Override
public void handleException(TransportException exp) { public void handleException(TransportException exp) {
logger.warn("failed to send failed shard to {}", exp, masterNode); logger.warn("failed to send failed shard to {}", exp, masterNode);
listener.onShardFailedFailure(masterNode, exp);
} }
}); });
} }
@ -284,4 +286,9 @@ public class ShardStateAction extends AbstractComponent {
return "" + shardRouting + ", indexUUID [" + indexUUID + "], message [" + message + "], failure [" + ExceptionsHelper.detailedMessage(failure) + "]"; return "" + shardRouting + ", indexUUID [" + indexUUID + "], message [" + message + "], failure [" + ExceptionsHelper.detailedMessage(failure) + "]";
} }
} }
public interface Listener {
default void onShardFailedNoMaster() {}
default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) {}
}
} }
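Both Listener callbacks carry empty default implementations, so callers override only what they need; a hedged sketch of a caller reacting to a failed delivery (variable names illustrative):
------------------------------
shardStateAction.shardFailed(shard, indexUUID, "failed to perform " + actionName, cause,
        new ShardStateAction.Listener() {
            @Override
            public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
                // e.g. schedule a resend once the master is reachable again
            }
        });
------------------------------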


@ -76,7 +76,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
} }
/** /**
* Translates the provided index expression into actual concrete indices. * Translates the provided index expression into actual concrete indices, properly deduplicated.
* *
* @param state the cluster state containing all the data to resolve to expressions to concrete indices * @param state the cluster state containing all the data to resolve to expressions to concrete indices
* @param options defines how the aliases or indices need to be resolved to concrete indices * @param options defines how the aliases or indices need to be resolved to concrete indices
@ -94,7 +94,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
} }
/** /**
* Translates the provided index expression into actual concrete indices. * Translates the provided index expression into actual concrete indices, properly deduplicated.
* *
* @param state the cluster state containing all the data to resolve to expressions to concrete indices * @param state the cluster state containing all the data to resolve to expressions to concrete indices
* @param options defines how the aliases or indices need to be resolved to concrete indices * @param options defines how the aliases or indices need to be resolved to concrete indices
@ -141,7 +141,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
} }
} }
List<String> concreteIndices = new ArrayList<>(expressions.size()); final Set<String> concreteIndices = new HashSet<>(expressions.size());
for (String expression : expressions) { for (String expression : expressions) {
AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression); AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression);
if (aliasOrIndex == null) { if (aliasOrIndex == null) {
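Switching from a List to a HashSet means an alias and its backing index named in the same request no longer produce duplicate concrete indices; a sketch with hypothetical names:
------------------------------
// "logs-alias" points at "logs-2015.10"; naming both used to return the index twice
String[] concrete = indexNameExpressionResolver.concreteIndices(
        state, IndicesOptions.strictExpandOpen(), "logs-alias", "logs-2015.10");
// with the Set-based implementation, "logs-2015.10" appears exactly once
------------------------------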


@ -37,9 +37,9 @@ import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
@ -66,9 +66,11 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
this.nodeIndexDeletedAction = nodeIndexDeletedAction; this.nodeIndexDeletedAction = nodeIndexDeletedAction;
} }
public void deleteIndex(final Request request, final Listener userListener) { public void deleteIndices(final Request request, final Listener userListener) {
Collection<String> indices = Arrays.asList(request.indices);
final DeleteIndexListener listener = new DeleteIndexListener(userListener); final DeleteIndexListener listener = new DeleteIndexListener(userListener);
clusterService.submitStateUpdateTask("delete-index [" + request.index + "]", Priority.URGENT, new ClusterStateUpdateTask() {
clusterService.submitStateUpdateTask("delete-index " + indices, Priority.URGENT, new ClusterStateUpdateTask() {
@Override @Override
public TimeValue timeout() { public TimeValue timeout() {
@ -82,34 +84,32 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
@Override @Override
public ClusterState execute(final ClusterState currentState) { public ClusterState execute(final ClusterState currentState) {
if (!currentState.metaData().hasConcreteIndex(request.index)) {
throw new IndexNotFoundException(request.index);
}
logger.info("[{}] deleting index", request.index);
RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable());
routingTableBuilder.remove(request.index); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData());
ClusterBlocks.Builder clusterBlocksBuilder = ClusterBlocks.builder().blocks(currentState.blocks());
MetaData newMetaData = MetaData.builder(currentState.metaData()) for (final String index: indices) {
.remove(request.index) if (!currentState.metaData().hasConcreteIndex(index)) {
.build(); throw new IndexNotFoundException(index);
}
RoutingAllocation.Result routingResult = allocationService.reroute( logger.debug("[{}] deleting index", index);
ClusterState.builder(currentState).routingTable(routingTableBuilder.build()).metaData(newMetaData).build());
ClusterBlocks blocks = ClusterBlocks.builder().blocks(currentState.blocks()).removeIndexBlocks(request.index).build();
routingTableBuilder.remove(index);
clusterBlocksBuilder.removeIndexBlocks(index);
metaDataBuilder.remove(index);
}
// wait for events from all nodes that it has been removed from their respective metadata... // wait for events from all nodes that it has been removed from their respective metadata...
int count = currentState.nodes().size(); int count = currentState.nodes().size();
// add the notifications that the store was deleted from *data* nodes // add the notifications that the store was deleted from *data* nodes
count += currentState.nodes().dataNodes().size(); count += currentState.nodes().dataNodes().size();
final AtomicInteger counter = new AtomicInteger(count); final AtomicInteger counter = new AtomicInteger(count * indices.size());
// this listener will be notified once we get back a notification based on the cluster state change below. // this listener will be notified once we get back a notification based on the cluster state change below.
final NodeIndexDeletedAction.Listener nodeIndexDeleteListener = new NodeIndexDeletedAction.Listener() { final NodeIndexDeletedAction.Listener nodeIndexDeleteListener = new NodeIndexDeletedAction.Listener() {
@Override @Override
public void onNodeIndexDeleted(String index, String nodeId) { public void onNodeIndexDeleted(String deleted, String nodeId) {
if (index.equals(request.index)) { if (indices.contains(deleted)) {
if (counter.decrementAndGet() == 0) { if (counter.decrementAndGet() == 0) {
listener.onResponse(new Response(true)); listener.onResponse(new Response(true));
nodeIndexDeletedAction.remove(this); nodeIndexDeletedAction.remove(this);
@ -118,8 +118,8 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
} }
@Override @Override
public void onNodeIndexStoreDeleted(String index, String nodeId) { public void onNodeIndexStoreDeleted(String deleted, String nodeId) {
if (index.equals(request.index)) { if (indices.contains(deleted)) {
if (counter.decrementAndGet() == 0) { if (counter.decrementAndGet() == 0) {
listener.onResponse(new Response(true)); listener.onResponse(new Response(true));
nodeIndexDeletedAction.remove(this); nodeIndexDeletedAction.remove(this);
@ -128,15 +128,15 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
} }
}; };
nodeIndexDeletedAction.add(nodeIndexDeleteListener); nodeIndexDeletedAction.add(nodeIndexDeleteListener);
listener.future = threadPool.schedule(request.timeout, ThreadPool.Names.SAME, () -> {
listener.future = threadPool.schedule(request.timeout, ThreadPool.Names.SAME, new Runnable() { listener.onResponse(new Response(false));
@Override nodeIndexDeletedAction.remove(nodeIndexDeleteListener);
public void run() {
listener.onResponse(new Response(false));
nodeIndexDeletedAction.remove(nodeIndexDeleteListener);
}
}); });
MetaData newMetaData = metaDataBuilder.build();
ClusterBlocks blocks = clusterBlocksBuilder.build();
RoutingAllocation.Result routingResult = allocationService.reroute(
ClusterState.builder(currentState).routingTable(routingTableBuilder.build()).metaData(newMetaData).build());
return ClusterState.builder(currentState).routingResult(routingResult).metaData(newMetaData).blocks(blocks).build(); return ClusterState.builder(currentState).routingResult(routingResult).metaData(newMetaData).blocks(blocks).build();
} }
@ -173,7 +173,6 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
} }
} }
public interface Listener { public interface Listener {
void onResponse(Response response); void onResponse(Response response);
@ -183,13 +182,13 @@ public class MetaDataDeleteIndexService extends AbstractComponent {
public static class Request { public static class Request {
final String index; final String[] indices;
TimeValue timeout = TimeValue.timeValueSeconds(10); TimeValue timeout = TimeValue.timeValueSeconds(10);
TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT;
public Request(String index) { public Request(String[] indices) {
this.index = index; this.indices = indices;
} }
public Request timeout(TimeValue timeout) { public Request timeout(TimeValue timeout) {
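With the request carrying an index array, one cluster-state update (and one round of node acks, hence count * indices.size()) covers every index being deleted; a hedged sketch, where the listener and index names are hypothetical:
------------------------------
MetaDataDeleteIndexService.Request request =
        new MetaDataDeleteIndexService.Request(new String[] { "logs-2015.10", "logs-2015.11" });
request.timeout(TimeValue.timeValueSeconds(30));
deleteIndexService.deleteIndices(request, listener); // a single "delete-index [...]" task
------------------------------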


@ -25,12 +25,11 @@ import org.elasticsearch.common.util.concurrent.ReleasableLock;
import java.util.*; import java.util.*;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.BiFunction;
import java.util.function.ToLongBiFunction; import java.util.function.ToLongBiFunction;
/** /**
@ -175,7 +174,7 @@ public class Cache<K, V> {
ReleasableLock readLock = new ReleasableLock(segmentLock.readLock()); ReleasableLock readLock = new ReleasableLock(segmentLock.readLock());
ReleasableLock writeLock = new ReleasableLock(segmentLock.writeLock()); ReleasableLock writeLock = new ReleasableLock(segmentLock.writeLock());
Map<K, Future<Entry<K, V>>> map = new HashMap<>(); Map<K, CompletableFuture<Entry<K, V>>> map = new HashMap<>();
SegmentStats segmentStats = new SegmentStats(); SegmentStats segmentStats = new SegmentStats();
@ -187,20 +186,28 @@ public class Cache<K, V> {
* @return the entry if there was one, otherwise null * @return the entry if there was one, otherwise null
*/ */
Entry<K, V> get(K key, long now) { Entry<K, V> get(K key, long now) {
Future<Entry<K, V>> future; CompletableFuture<Entry<K, V>> future;
Entry<K, V> entry = null; Entry<K, V> entry = null;
try (ReleasableLock ignored = readLock.acquire()) { try (ReleasableLock ignored = readLock.acquire()) {
future = map.get(key); future = map.get(key);
} }
if (future != null) { if (future != null) {
segmentStats.hit(); try {
try { entry = future.handle((ok, ex) -> {
entry = future.get(); if (ok != null) {
entry.accessTime = now; segmentStats.hit();
} catch (ExecutionException | InterruptedException e) { ok.accessTime = now;
throw new IllegalStateException("future should be a completedFuture for which get should not throw", e); return ok;
} } else {
} else { segmentStats.miss();
return null;
}
}).get();
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException(e);
}
}
else {
segmentStats.miss(); segmentStats.miss();
} }
return entry; return entry;
@ -216,11 +223,19 @@ public class Cache<K, V> {
*/ */
Tuple<Entry<K, V>, Entry<K, V>> put(K key, V value, long now) { Tuple<Entry<K, V>, Entry<K, V>> put(K key, V value, long now) {
Entry<K, V> entry = new Entry<>(key, value, now); Entry<K, V> entry = new Entry<>(key, value, now);
Entry<K, V> existing; Entry<K, V> existing = null;
try (ReleasableLock ignored = writeLock.acquire()) { try (ReleasableLock ignored = writeLock.acquire()) {
try { try {
Future<Entry<K, V>> future = map.put(key, CompletableFuture.completedFuture(entry)); CompletableFuture<Entry<K, V>> future = map.put(key, CompletableFuture.completedFuture(entry));
existing = future != null ? future.get() : null; if (future != null) {
existing = future.handle((ok, ex) -> {
if (ok != null) {
return ok;
} else {
return null;
}
}).get();
}
} catch (ExecutionException | InterruptedException e) { } catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException("future should be a completedFuture for which get should not throw", e); throw new IllegalStateException("future should be a completedFuture for which get should not throw", e);
} }
@ -235,17 +250,23 @@ public class Cache<K, V> {
* @return the removed entry if there was one, otherwise null * @return the removed entry if there was one, otherwise null
*/ */
Entry<K, V> remove(K key) { Entry<K, V> remove(K key) {
Future<Entry<K, V>> future; CompletableFuture<Entry<K, V>> future;
Entry<K, V> entry = null; Entry<K, V> entry = null;
try (ReleasableLock ignored = writeLock.acquire()) { try (ReleasableLock ignored = writeLock.acquire()) {
future = map.remove(key); future = map.remove(key);
} }
if (future != null) { if (future != null) {
segmentStats.eviction();
try { try {
entry = future.get(); entry = future.handle((ok, ex) -> {
if (ok != null) {
segmentStats.eviction();
return ok;
} else {
return null;
}
}).get();
} catch (ExecutionException | InterruptedException e) { } catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException("future should be a completedFuture for which get should not throw", e); throw new IllegalStateException(e);
} }
} }
return entry; return entry;
@ -327,39 +348,57 @@ public class Cache<K, V> {
// the segment lock; to do this, we atomically put a future in the map that can load the value, and then // the segment lock; to do this, we atomically put a future in the map that can load the value, and then
// get the value from this future on the thread that won the race to place the future into the segment map // get the value from this future on the thread that won the race to place the future into the segment map
CacheSegment<K, V> segment = getCacheSegment(key); CacheSegment<K, V> segment = getCacheSegment(key);
Future<Entry<K, V>> future; CompletableFuture<Entry<K, V>> future;
FutureTask<Entry<K, V>> task = new FutureTask<>(() -> new Entry<>(key, loader.load(key), now)); CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>();
try (ReleasableLock ignored = segment.writeLock.acquire()) { try (ReleasableLock ignored = segment.writeLock.acquire()) {
future = segment.map.putIfAbsent(key, task); future = segment.map.putIfAbsent(key, completableFuture);
}
if (future == null) {
future = task;
task.run();
} }
Entry<K, V> entry; BiFunction<? super Entry<K, V>, Throwable, ? extends V> handler = (ok, ex) -> {
try { if (ok != null) {
entry = future.get(); try (ReleasableLock ignored = lruLock.acquire()) {
} catch (ExecutionException | InterruptedException e) { promote(ok, now);
// if the future ended exceptionally, we do not want to pollute the cache
// however, we have to take care to ensure that the polluted entry has not already been replaced
try (ReleasableLock ignored = segment.writeLock.acquire()) {
Future<Entry<K, V>> sanity = segment.map.get(key);
try {
sanity.get();
} catch (ExecutionException | InterruptedException gotcha) {
segment.map.remove(key);
} }
return ok.value;
} else {
try (ReleasableLock ignored = segment.writeLock.acquire()) {
CompletableFuture<Entry<K, V>> sanity = segment.map.get(key);
if (sanity != null && sanity.isCompletedExceptionally()) {
segment.map.remove(key);
}
}
return null;
} }
throw (e instanceof ExecutionException) ? (ExecutionException)e : new ExecutionException(e); };
CompletableFuture<V> completableValue;
if (future == null) {
future = completableFuture;
completableValue = future.handle(handler);
V loaded;
try {
loaded = loader.load(key);
} catch (Exception e) {
future.completeExceptionally(e);
throw new ExecutionException(e);
}
if (loaded == null) {
NullPointerException npe = new NullPointerException("loader returned a null value");
future.completeExceptionally(npe);
throw new ExecutionException(npe);
} else {
future.complete(new Entry<>(key, loaded, now));
}
} else {
completableValue = future.handle(handler);
} }
if (entry.value == null) {
throw new ExecutionException(new NullPointerException("loader returned a null value")); try {
value = completableValue.get();
} catch (InterruptedException e) {
throw new IllegalStateException(e);
} }
try (ReleasableLock ignored = lruLock.acquire()) {
promote(entry, now);
}
value = entry.value;
} }
return value; return value;
} }
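The thread that wins putIfAbsent runs the loader and completes the future it installed, while losing threads block on handle(...).get(), so the loader executes at most once per key; a minimal usage sketch (construction via CacheBuilder and the expensiveLoad helper are assumptions, ExecutionException handling elided):
------------------------------
Cache<String, String> cache = CacheBuilder.<String, String>builder().build();
String value = cache.computeIfAbsent("key", k -> expensiveLoad(k)); // loaded once, then cached
------------------------------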


@ -20,8 +20,8 @@
package org.elasticsearch.common.geo; package org.elasticsearch.common.geo;
import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.XGeoHashUtils; import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.XGeoUtils; import org.apache.lucene.util.GeoUtils;
/** /**
* *
@ -81,14 +81,14 @@ public final class GeoPoint {
} }
public GeoPoint resetFromIndexHash(long hash) { public GeoPoint resetFromIndexHash(long hash) {
lon = XGeoUtils.mortonUnhashLon(hash); lon = GeoUtils.mortonUnhashLon(hash);
lat = XGeoUtils.mortonUnhashLat(hash); lat = GeoUtils.mortonUnhashLat(hash);
return this; return this;
} }
public GeoPoint resetFromGeoHash(String geohash) { public GeoPoint resetFromGeoHash(String geohash) {
final long hash = XGeoHashUtils.mortonEncode(geohash); final long hash = GeoHashUtils.mortonEncode(geohash);
return this.reset(XGeoUtils.mortonUnhashLat(hash), XGeoUtils.mortonUnhashLon(hash)); return this.reset(GeoUtils.mortonUnhashLat(hash), GeoUtils.mortonUnhashLon(hash));
} }
public GeoPoint resetFromGeoHash(long geohashLong) { public GeoPoint resetFromGeoHash(long geohashLong) {
@ -113,11 +113,11 @@ public final class GeoPoint {
} }
public final String geohash() { public final String geohash() {
return XGeoHashUtils.stringEncode(lon, lat); return GeoHashUtils.stringEncode(lon, lat);
} }
public final String getGeohash() { public final String getGeohash() {
return XGeoHashUtils.stringEncode(lon, lat); return GeoHashUtils.stringEncode(lon, lat);
} }
@Override @Override
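Behavior is unchanged here; the helpers simply moved from the X-prefixed forks to Lucene's own GeoHashUtils/GeoUtils. A round-trip sketch with hypothetical coordinates:
------------------------------
GeoPoint point = new GeoPoint(52.3760, 4.8940);               // lat, lon
String geohash = point.geohash();                             // GeoHashUtils.stringEncode
GeoPoint restored = new GeoPoint().resetFromGeoHash(geohash); // mortonEncode round-trip
------------------------------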


@ -28,11 +28,7 @@ import java.util.Collection;
import java.util.List; import java.util.List;
/** /**
* Overrides bounding box logic in ShapeCollection base class to comply with * Extends spatial4j ShapeCollection for points_only shape indexing support
* OGC OpenGIS Abstract Specification: An Object Model for Interoperable Geoprocessing.
*
* NOTE: This algorithm is O(N) and can possibly be improved O(log n) using an internal R*-Tree
* data structure for a collection of bounding boxes
*/ */
public class XShapeCollection<S extends Shape> extends ShapeCollection<S> { public class XShapeCollection<S extends Shape> extends ShapeCollection<S> {
@ -49,42 +45,4 @@ public class XShapeCollection<S extends Shape> extends ShapeCollection<S> {
public void setPointsOnly(boolean pointsOnly) { public void setPointsOnly(boolean pointsOnly) {
this.pointsOnly = pointsOnly; this.pointsOnly = pointsOnly;
} }
@Override
protected Rectangle computeBoundingBox(Collection<? extends Shape> shapes, SpatialContext ctx) {
Rectangle retBox = shapes.iterator().next().getBoundingBox();
for (Shape geom : shapes) {
retBox = expandBBox(retBox, geom.getBoundingBox());
}
return retBox;
}
/**
* Spatial4J shapes have no knowledge of directed edges. For this reason, a bounding box
* that wraps the dateline can have a min longitude that is mathematically &gt; than the
* Rectangles' minX value. This is an issue for geometric collections (e.g., MultiPolygon
* and ShapeCollection) Until geometry logic can be cleaned up in Spatial4J, ES provides
* the following expansion algorithm for GeometryCollections
*/
private Rectangle expandBBox(Rectangle bbox, Rectangle expand) {
if (bbox.equals(expand) || bbox.equals(SpatialContext.GEO.getWorldBounds())) {
return bbox;
}
double minX = bbox.getMinX();
double eMinX = expand.getMinX();
double maxX = bbox.getMaxX();
double eMaxX = expand.getMaxX();
double minY = bbox.getMinY();
double eMinY = expand.getMinY();
double maxY = bbox.getMaxY();
double eMaxY = expand.getMaxY();
bbox.reset(Math.min(Math.min(minX, maxX), Math.min(eMinX, eMaxX)),
Math.max(Math.max(minX, maxX), Math.max(eMinX, eMaxX)),
Math.min(Math.min(minY, maxY), Math.min(eMinY, eMaxY)),
Math.max(Math.max(minY, maxY), Math.max(eMinY, eMaxY)));
return bbox;
}
} }


@ -60,9 +60,9 @@ public class Lucene {
public static final Version VERSION = Version.LATEST; public static final Version VERSION = Version.LATEST;
public static final Version ANALYZER_VERSION = VERSION; public static final Version ANALYZER_VERSION = VERSION;
public static final Version QUERYPARSER_VERSION = VERSION; public static final Version QUERYPARSER_VERSION = VERSION;
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene50"; public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
public static final String LATEST_CODEC = "Lucene53"; public static final String LATEST_CODEC = "Lucene54";
static { static {
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);


@ -87,4 +87,10 @@ public interface Discovery extends LifecycleComponent<Discovery> {
super(msg, cause, args); super(msg, cause, args);
} }
} }
/**
* @return stats about the discovery
*/
DiscoveryStats stats();
} }


@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
import java.io.IOException;
public class DiscoveryStats implements Streamable, ToXContent {
@Nullable
private PendingClusterStateStats queueStats;
public DiscoveryStats(PendingClusterStateStats queueStats) {
this.queueStats = queueStats;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.DISCOVERY);
if (queueStats != null) {
queueStats.toXContent(builder, params);
}
builder.endObject();
return builder;
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
queueStats = new PendingClusterStateStats();
queueStats.readFrom(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (queueStats != null) {
out.writeBoolean(true);
queueStats.writeTo(out);
} else {
out.writeBoolean(false);
}
}
static final class Fields {
static final XContentBuilderString DISCOVERY = new XContentBuilderString("discovery");
}
public PendingClusterStateStats getQueueStats() {
return queueStats;
}
}
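A hedged sketch of consuming these stats; getQueueStats() may return null for implementations without a pending-states queue (see LocalDiscovery below):
------------------------------
DiscoveryStats stats = discovery.stats();
PendingClusterStateStats queue = stats.getQueueStats();
if (queue != null) {
    // by construction total == pending + committed (see PendingClusterStatesQueue.stats())
    assert queue.getTotal() == queue.getPending() + queue.getCommitted();
}
------------------------------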


@ -316,6 +316,11 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem
} }
} }
@Override
public DiscoveryStats stats() {
return new DiscoveryStats(null);
}
private LocalDiscovery[] members() { private LocalDiscovery[] members() {
ClusterGroup clusterGroup = clusterGroups.get(clusterName); ClusterGroup clusterGroup = clusterGroups.get(clusterName);
if (clusterGroup == null) { if (clusterGroup == null) {


@ -43,6 +43,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;
import org.elasticsearch.discovery.InitialStateDiscoveryListener; import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.discovery.zen.fd.MasterFaultDetection; import org.elasticsearch.discovery.zen.fd.MasterFaultDetection;
@ -337,6 +339,12 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
} }
} }
@Override
public DiscoveryStats stats() {
PendingClusterStateStats queueStats = publishClusterState.pendingStatesQueue().stats();
return new DiscoveryStats(queueStats);
}
/** /**
* returns true if zen discovery is started and there is a currently a background thread active for (re)joining * returns true if zen discovery is started and there is a currently a background thread active for (re)joining
* the cluster used for testing. * the cluster used for testing.


@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen.publish;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
/**
* Class encapsulating stats about the PendingClusterStatesQueue
*/
public class PendingClusterStateStats implements Streamable, ToXContent {
private int total;
private int pending;
private int committed;
public PendingClusterStateStats() {
}
public PendingClusterStateStats(int total, int pending, int committed) {
this.total = total;
this.pending = pending;
this.committed = committed;
}
public int getCommitted() {
return committed;
}
public int getPending() {
return pending;
}
public int getTotal() {
return total;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.QUEUE);
builder.field(Fields.TOTAL, total);
builder.field(Fields.PENDING, pending);
builder.field(Fields.COMMITTED, committed);
builder.endObject();
return builder;
}
@Override
public void readFrom(StreamInput in) throws IOException {
total = in.readVInt();
pending = in.readVInt();
committed = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(total);
out.writeVInt(pending);
out.writeVInt(committed);
}
static final class Fields {
static final XContentBuilderString QUEUE = new XContentBuilderString("cluster_state_queue");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString PENDING = new XContentBuilderString("pending");
static final XContentBuilderString COMMITTED = new XContentBuilderString("committed");
}
@Override
public String toString() {
return "PendingClusterStateStats(total=" + total + ", pending=" + pending + ", committed=" + committed + ")";
}
}


@ -283,4 +283,17 @@ public class PendingClusterStatesQueue {
} }
} }
public synchronized PendingClusterStateStats stats() {
// count the committed cluster states
int committed = 0;
for (ClusterStateContext clusterStatsContext : pendingStates) {
if (clusterStatsContext.committed()) {
committed += 1;
}
}
return new PendingClusterStateStats(pendingStates.size(), pendingStates.size() - committed, committed);
}
} }


@ -19,9 +19,14 @@
package org.elasticsearch.index; package org.elasticsearch.index;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.IndexFieldDataService;
@ -33,6 +38,8 @@ import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import java.util.*; import java.util.*;
import java.util.function.BiFunction; import java.util.function.BiFunction;
@ -50,25 +57,37 @@ import java.util.function.Consumer;
* <li>Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}</li> * <li>Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}</li>
* </ul> * </ul>
*/ */
public class IndexModule extends AbstractModule { public final class IndexModule extends AbstractModule {
public static final String STORE_TYPE = "index.store.type"; public static final String STORE_TYPE = "index.store.type";
public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity";
public static final String INDEX_QUERY_CACHE = "index";
public static final String NONE_QUERY_CACHE = "none";
public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
// for test purposes only
public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
private final IndexSettings indexSettings; private final IndexSettings indexSettings;
private final IndexStoreConfig indexStoreConfig; private final IndexStoreConfig indexStoreConfig;
private final IndicesQueryCache indicesQueryCache;
// pkg private so tests can mock // pkg private so tests can mock
Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class; Class<? extends EngineFactory> engineFactoryImpl = InternalEngineFactory.class;
Class<? extends IndexSearcherWrapper> indexSearcherWrapper = null; private SetOnce<IndexSearcherWrapperFactory> indexSearcherWrapper = new SetOnce<>();
private final Set<Consumer<Settings>> settingsConsumers = new HashSet<>(); private final Set<Consumer<Settings>> settingsConsumers = new HashSet<>();
private final Set<IndexEventListener> indexEventListeners = new HashSet<>(); private final Set<IndexEventListener> indexEventListeners = new HashSet<>();
private IndexEventListener listener; private IndexEventListener listener;
private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>(); private final Map<String, BiFunction<String, Settings, SimilarityProvider>> similarities = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>(); private final Map<String, BiFunction<IndexSettings, IndexStoreConfig, IndexStore>> storeTypes = new HashMap<>();
private final Map<String, BiFunction<IndexSettings, IndicesQueryCache, QueryCache>> queryCaches = new HashMap<>();
private IndicesWarmer indicesWarmer;
public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig) { public IndexModule(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig, IndicesQueryCache indicesQueryCache, IndicesWarmer warmer) {
this.indexStoreConfig = indexStoreConfig; this.indexStoreConfig = indexStoreConfig;
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
this.indicesQueryCache = indicesQueryCache;
this.indicesWarmer = warmer;
registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache::new);
registerQueryCache(NONE_QUERY_CACHE, (a, b) -> new NoneQueryCache(a));
} }
/** /**
@ -155,6 +174,28 @@ public class IndexModule extends AbstractModule {
similarities.put(name, similarity); similarities.put(name, similarity);
} }
/**
* Registers a {@link QueryCache} provider for a given name
* @param name the provider's / cache's name
* @param provider the provider instance
*/
public void registerQueryCache(String name, BiFunction<IndexSettings, IndicesQueryCache, QueryCache> provider) {
if (provider == null) {
throw new IllegalArgumentException("provider must not be null");
}
if (queryCaches.containsKey(name)) {
throw new IllegalArgumentException("Can't register the same [query_cache] more than once for [" + name + "]");
}
queryCaches.put(name, provider);
}
/**
* Sets a {@link org.elasticsearch.index.IndexModule.IndexSearcherWrapperFactory} that is called once the IndexService is fully constructed.
* Note: this method can only be called once per index. Multiple wrappers are not supported.
*/
public void setSearcherWrapper(IndexSearcherWrapperFactory indexSearcherWrapperFactory) {
this.indexSearcherWrapper.set(indexSearcherWrapperFactory);
}
public IndexEventListener freeze() { public IndexEventListener freeze() {
// TODO somehow we need to make this pkg private... // TODO somehow we need to make this pkg private...
@ -176,11 +217,7 @@ public class IndexModule extends AbstractModule {
@Override @Override
protected void configure() { protected void configure() {
bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton(); bind(EngineFactory.class).to(engineFactoryImpl).asEagerSingleton();
if (indexSearcherWrapper == null) { bind(IndexSearcherWrapperFactory.class).toInstance(indexSearcherWrapper.get() == null ? (shard) -> null : indexSearcherWrapper.get());
bind(IndexSearcherWrapper.class).toProvider(Providers.of(null));
} else {
bind(IndexSearcherWrapper.class).to(indexSearcherWrapper).asEagerSingleton();
}
bind(IndexEventListener.class).toInstance(freeze()); bind(IndexEventListener.class).toInstance(freeze());
bind(IndexService.class).asEagerSingleton(); bind(IndexService.class).asEagerSingleton();
bind(IndexServicesProvider.class).asEagerSingleton(); bind(IndexServicesProvider.class).asEagerSingleton();
@ -203,6 +240,15 @@ public class IndexModule extends AbstractModule {
throw new IllegalStateException("store must not be null"); throw new IllegalStateException("store must not be null");
} }
} }
final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE);
BiFunction<IndexSettings, IndicesQueryCache, QueryCache> queryCacheProvider = queryCaches.get(queryCacheType);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, indicesWarmer);
QueryCache queryCache = queryCacheProvider.apply(settings, indicesQueryCache);
IndexCache indexCache = new IndexCache(settings, queryCache, bitsetFilterCache);
bind(QueryCache.class).toInstance(queryCache);
bind(IndexCache.class).toInstance(indexCache);
bind(BitsetFilterCache.class).toInstance(bitsetFilterCache);
bind(IndexStore.class).toInstance(store); bind(IndexStore.class).toInstance(store);
bind(SimilarityService.class).toInstance(new SimilarityService(settings, similarities)); bind(SimilarityService.class).toInstance(new SimilarityService(settings, similarities));
} }
@ -224,4 +270,14 @@ public class IndexModule extends AbstractModule {
return getSettingsKey().equals(setting); return getSettingsKey().equals(setting);
} }
} }
/**
* Factory for creating new {@link IndexSearcherWrapper} instances
*/
public interface IndexSearcherWrapperFactory {
/**
* Returns a new IndexSearcherWrapper. This method is called once per index per node
*/
IndexSearcherWrapper newWrapper(final IndexService indexService);
}
} }
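The two registration hooks above replace the former IndexCacheModule (deleted further down). A minimal usage sketch, assuming the standard onModule(...) plugin hook; MyQueryCache and MyIndexSearcherWrapper are hypothetical plugin classes:
------------------------------
// register a custom query cache and a searcher wrapper on the per-index module
public void onModule(IndexModule indexModule) {
    // selected per index via the "index.queries.cache.type" setting
    indexModule.registerQueryCache("my_cache",
            (indexSettings, indicesQueryCache) -> new MyQueryCache(indexSettings));
    // at most one factory per index; newWrapper() runs once the IndexService is built
    indexModule.setSearcherWrapper(indexService -> new MyIndexSearcherWrapper(indexService));
}
------------------------------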

View File

@ -75,6 +75,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
private final IndicesService indicesServices; private final IndicesService indicesServices;
private final IndexServicesProvider indexServicesProvider; private final IndexServicesProvider indexServicesProvider;
private final IndexStore indexStore; private final IndexStore indexStore;
private final IndexSearcherWrapper searcherWrapper;
private volatile Map<Integer, IndexShard> shards = emptyMap(); private volatile Map<Integer, IndexShard> shards = emptyMap();
private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false); private final AtomicBoolean deleted = new AtomicBoolean(false);
@ -88,7 +89,8 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
IndicesService indicesServices, IndicesService indicesServices,
IndexServicesProvider indexServicesProvider, IndexServicesProvider indexServicesProvider,
IndexStore indexStore, IndexStore indexStore,
IndexEventListener eventListener) { IndexEventListener eventListener,
IndexModule.IndexSearcherWrapperFactory wrapperFactory) {
super(indexSettings); super(indexSettings);
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
this.analysisService = analysisService; this.analysisService = analysisService;
@ -101,6 +103,7 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
this.indexStore = indexStore; this.indexStore = indexStore;
indexFieldData.setListener(new FieldDataCacheListener(this)); indexFieldData.setListener(new FieldDataCacheListener(this));
bitSetFilterCache.setListener(new BitsetCacheListener(this)); bitSetFilterCache.setListener(new BitsetCacheListener(this));
this.searcherWrapper = wrapperFactory.newWrapper(this);
} }
public int numberOfShards() { public int numberOfShards() {
@ -265,9 +268,9 @@ public class IndexService extends AbstractIndexComponent implements IndexCompone
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings)); (primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId))); store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> indexServicesProvider.getIndicesQueryCache().onClose(shardId)));
if (useShadowEngine(primary, indexSettings)) { if (useShadowEngine(primary, indexSettings)) {
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexServicesProvider); indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
} else { } else {
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexServicesProvider); indexShard = new IndexShard(shardId, this.indexSettings, path, store, searcherWrapper, indexServicesProvider);
} }
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created"); eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");

View File

@ -56,12 +56,11 @@ public final class IndexServicesProvider {
private final SimilarityService similarityService; private final SimilarityService similarityService;
private final EngineFactory factory; private final EngineFactory factory;
private final BigArrays bigArrays; private final BigArrays bigArrays;
private final IndexSearcherWrapper indexSearcherWrapper;
private final IndexingMemoryController indexingMemoryController; private final IndexingMemoryController indexingMemoryController;
private final IndexEventListener listener; private final IndexEventListener listener;
@Inject @Inject
public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, @Nullable IndexSearcherWrapper indexSearcherWrapper, IndexingMemoryController indexingMemoryController) { public IndexServicesProvider(IndexEventListener listener, ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndicesQueryCache indicesQueryCache, CodecService codecService, TermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, @Nullable IndicesWarmer warmer, SimilarityService similarityService, EngineFactory factory, BigArrays bigArrays, IndexingMemoryController indexingMemoryController) {
this.listener = listener; this.listener = listener;
this.threadPool = threadPool; this.threadPool = threadPool;
this.mapperService = mapperService; this.mapperService = mapperService;
@ -75,7 +74,6 @@ public final class IndexServicesProvider {
this.similarityService = similarityService; this.similarityService = similarityService;
this.factory = factory; this.factory = factory;
this.bigArrays = bigArrays; this.bigArrays = bigArrays;
this.indexSearcherWrapper = indexSearcherWrapper;
this.indexingMemoryController = indexingMemoryController; this.indexingMemoryController = indexingMemoryController;
} }
@ -126,13 +124,7 @@ public final class IndexServicesProvider {
return factory; return factory;
} }
public BigArrays getBigArrays() { public BigArrays getBigArrays() { return bigArrays; }
return bigArrays;
}
public IndexSearcherWrapper getIndexSearcherWrapper() {
return indexSearcherWrapper;
}
public IndexingMemoryController getIndexingMemoryController() { public IndexingMemoryController getIndexingMemoryController() {
return indexingMemoryController; return indexingMemoryController;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.cache; package org.elasticsearch.index.cache;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@ -37,7 +36,6 @@ public class IndexCache extends AbstractIndexComponent implements Closeable {
private final QueryCache queryCache; private final QueryCache queryCache;
private final BitsetFilterCache bitsetFilterCache; private final BitsetFilterCache bitsetFilterCache;
@Inject
public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache) { public IndexCache(IndexSettings indexSettings, QueryCache queryCache, BitsetFilterCache bitsetFilterCache) {
super(indexSettings); super(indexSettings);
this.queryCache = queryCache; this.queryCache = queryCache;

View File

@ -1,59 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.cache.query.index.IndexQueryCache;
import org.elasticsearch.index.cache.query.none.NoneQueryCache;
public class IndexCacheModule extends AbstractModule {
public static final String INDEX_QUERY_CACHE = "index";
public static final String NONE_QUERY_CACHE = "none";
public static final String QUERY_CACHE_TYPE = "index.queries.cache.type";
// for test purposes only
public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything";
private final Settings indexSettings;
private final ExtensionPoint.SelectedType<QueryCache> queryCaches;
public IndexCacheModule(Settings settings) {
this.indexSettings = settings;
this.queryCaches = new ExtensionPoint.SelectedType<>("query_cache", QueryCache.class);
registerQueryCache(INDEX_QUERY_CACHE, IndexQueryCache.class);
registerQueryCache(NONE_QUERY_CACHE, NoneQueryCache.class);
}
public void registerQueryCache(String name, Class<? extends QueryCache> clazz) {
queryCaches.registerExtension(name, clazz);
}
@Override
protected void configure() {
queryCaches.bindType(binder(), indexSettings, QUERY_CACHE_TYPE, INDEX_QUERY_CACHE);
bind(BitsetFilterCache.class).asEagerSingleton();
bind(IndexCache.class).asEagerSingleton();
}
}

View File

@ -32,16 +32,15 @@ import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.cache.CacheBuilder;
import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalListener;
import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.cache.RemovalNotification;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
@ -50,7 +49,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle; import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
@ -68,7 +66,7 @@ import java.util.concurrent.Executor;
* and require that it should always be around should use this cache, otherwise the * and require that it should always be around should use this cache, otherwise the
* {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead. * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
*/ */
public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable { public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {
public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly"; public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";
private static final Listener DEFAULT_NOOP_LISTENER = new Listener() { private static final Listener DEFAULT_NOOP_LISTENER = new Listener() {
@ -85,20 +83,13 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
private final Cache<Object, Cache<Query, Value>> loadedFilters; private final Cache<Object, Cache<Query, Value>> loadedFilters;
private volatile Listener listener = DEFAULT_NOOP_LISTENER; private volatile Listener listener = DEFAULT_NOOP_LISTENER;
private final BitSetProducerWarmer warmer; private final BitSetProducerWarmer warmer;
private final IndicesWarmer indicesWarmer;
private IndicesWarmer indicesWarmer; public BitsetFilterCache(IndexSettings indexSettings, IndicesWarmer indicesWarmer) {
@Inject
public BitsetFilterCache(IndexSettings indexSettings) {
super(indexSettings); super(indexSettings);
this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true); this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build(); this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build();
this.warmer = new BitSetProducerWarmer(); this.warmer = new BitSetProducerWarmer();
}
@Inject(optional = true)
public void setIndicesWarmer(IndicesWarmer indicesWarmer) {
this.indicesWarmer = indicesWarmer; this.indicesWarmer = indicesWarmer;
indicesWarmer.addListener(warmer); indicesWarmer.addListener(warmer);
} }
@ -129,10 +120,11 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
@Override @Override
public void close() { public void close() {
if (indicesWarmer != null) { try {
indicesWarmer.removeListener(warmer); indicesWarmer.removeListener(warmer);
} finally {
clear("close");
} }
clear("close");
} }
public void clear(String reason) { public void clear(String reason) {
@ -229,10 +221,10 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
} }
} }
final class BitSetProducerWarmer extends IndicesWarmer.Listener { final class BitSetProducerWarmer implements IndicesWarmer.Listener {
@Override @Override
public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) { public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
if (!loadRandomAccessFiltersEagerly) { if (!loadRandomAccessFiltersEagerly) {
return TerminationHandle.NO_WAIT; return TerminationHandle.NO_WAIT;
} }
@ -258,9 +250,9 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
warmUp.add(Queries.newNonNestedFilter()); warmUp.add(Queries.newNonNestedFilter());
} }
final Executor executor = threadPool.executor(executor()); final Executor executor = indicesWarmer.getExecutor();
final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size()); final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
for (final LeafReaderContext ctx : context.searcher().reader().leaves()) { for (final LeafReaderContext ctx : searcher.reader().leaves()) {
for (final Query filterToWarm : warmUp) { for (final Query filterToWarm : warmUp) {
executor.execute(() -> { executor.execute(() -> {
try { try {
@ -281,7 +273,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
} }
@Override @Override
public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) { public TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
return TerminationHandle.NO_WAIT; return TerminationHandle.NO_WAIT;
} }
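BitSetProducerWarmer above now implements the slimmed-down IndicesWarmer.Listener contract: both callbacks receive the shard plus an Engine.Searcher, and the executor comes from the warmer itself. A no-op sketch of that contract, with illustrative bodies:
------------------------------
// hedged sketch of the new listener shape; NO_WAIT signals nothing to wait for
final class NoopWarmerListener implements IndicesWarmer.Listener {
    @Override
    public IndicesWarmer.TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
        return IndicesWarmer.TerminationHandle.NO_WAIT;
    }

    @Override
    public IndicesWarmer.TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
        return IndicesWarmer.TerminationHandle.NO_WAIT;
    }
}
------------------------------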

View File

@ -21,7 +21,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene53.Lucene53Codec; import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -56,8 +56,8 @@ public class CodecService extends AbstractIndexComponent {
this.mapperService = mapperService; this.mapperService = mapperService;
MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder(); MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) { if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene53Codec()); codecs.put(DEFAULT_CODEC, new Lucene54Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene53Codec(Mode.BEST_COMPRESSION)); codecs.put(BEST_COMPRESSION_CODEC, new Lucene54Codec(Mode.BEST_COMPRESSION));
} else { } else {
codecs.put(DEFAULT_CODEC, codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));

View File

@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene53.Lucene53Codec; import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
* configured for a specific field the default postings format is used. * configured for a specific field the default postings format is used.
*/ */
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
public class PerFieldMappingPostingFormatCodec extends Lucene53Codec { public class PerFieldMappingPostingFormatCodec extends Lucene54Codec {
private final ESLogger logger; private final ESLogger logger;
private final MapperService mapperService; private final MapperService mapperService;

View File

@ -1056,4 +1056,18 @@ public abstract class Engine implements Closeable {
public long getLastWriteNanos() { public long getLastWriteNanos() {
return this.lastWriteNanos; return this.lastWriteNanos;
} }
/**
* Called for each newly opened engine searcher to warm new segments
* @see EngineConfig#getWarmer()
*/
public interface Warmer {
/**
* Called once a new Searcher is opened.
* @param searcher the searcher to warm
* @param isTopLevelReader <code>true</code> iff the searcher is built from a top-level reader.
* Otherwise the searcher might be built from a leaf reader to warm in isolation
*/
void warm(Engine.Searcher searcher, boolean isTopLevelReader);
}
} }
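Warmer is a single-method interface, so a lambda satisfies it. A minimal sketch; the branch bodies are illustrative:
------------------------------
// InternalEngine passes false when warming a lone (merged) segment and
// true for "top_reader_warming" over the whole index
Engine.Warmer warmer = (searcher, isTopLevelReader) -> {
    if (isTopLevelReader) {
        // warm caches against the full index view
    } else {
        // warm a single leaf/segment in isolation
    }
};
------------------------------
EngineConfig below maps a null warmer to the no-op (a, b) -> {}, so engines never need a null check.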

View File

@ -61,8 +61,7 @@ public final class EngineConfig {
private final String codecName; private final String codecName;
private final ThreadPool threadPool; private final ThreadPool threadPool;
private final ShardIndexingService indexingService; private final ShardIndexingService indexingService;
@Nullable private final Engine.Warmer warmer;
private final IndicesWarmer warmer;
private final Store store; private final Store store;
private final SnapshotDeletionPolicy deletionPolicy; private final SnapshotDeletionPolicy deletionPolicy;
private final MergePolicy mergePolicy; private final MergePolicy mergePolicy;
@ -116,7 +115,7 @@ public final class EngineConfig {
* Creates a new {@link org.elasticsearch.index.engine.EngineConfig} * Creates a new {@link org.elasticsearch.index.engine.EngineConfig}
*/ */
public EngineConfig(ShardId shardId, ThreadPool threadPool, ShardIndexingService indexingService, public EngineConfig(ShardId shardId, ThreadPool threadPool, ShardIndexingService indexingService,
Settings indexSettings, IndicesWarmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy, Settings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
MergePolicy mergePolicy, MergeSchedulerConfig mergeSchedulerConfig, Analyzer analyzer, MergePolicy mergePolicy, MergeSchedulerConfig mergeSchedulerConfig, Analyzer analyzer,
Similarity similarity, CodecService codecService, Engine.EventListener eventListener, Similarity similarity, CodecService codecService, Engine.EventListener eventListener,
TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig, TimeValue flushMergesAfter) { TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig, TimeValue flushMergesAfter) {
@ -124,7 +123,7 @@ public final class EngineConfig {
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
this.threadPool = threadPool; this.threadPool = threadPool;
this.indexingService = indexingService; this.indexingService = indexingService;
this.warmer = warmer; this.warmer = warmer == null ? (a,b) -> {} : warmer;
this.store = store; this.store = store;
this.deletionPolicy = deletionPolicy; this.deletionPolicy = deletionPolicy;
this.mergePolicy = mergePolicy; this.mergePolicy = mergePolicy;
@ -267,11 +266,9 @@ public final class EngineConfig {
} }
/** /**
* Returns an {@link org.elasticsearch.indices.IndicesWarmer} used to warm new searchers before they are used for searching. * Returns an {@link org.elasticsearch.index.engine.Engine.Warmer} used to warm new searchers before they are used for searching.
* Note: This method might retrun <code>null</code>
*/ */
@Nullable public Engine.Warmer getWarmer() {
public IndicesWarmer getWarmer() {
return warmer; return warmer;
} }

View File

@ -79,8 +79,7 @@ public class InternalEngine extends Engine {
private volatile long lastDeleteVersionPruneTimeMSec; private volatile long lastDeleteVersionPruneTimeMSec;
private final ShardIndexingService indexingService; private final ShardIndexingService indexingService;
@Nullable private final Engine.Warmer warmer;
private final IndicesWarmer warmer;
private final Translog translog; private final Translog translog;
private final ElasticsearchConcurrentMergeScheduler mergeScheduler; private final ElasticsearchConcurrentMergeScheduler mergeScheduler;
@ -930,8 +929,7 @@ public class InternalEngine extends Engine {
assert isMergedSegment(esLeafReader); assert isMergedSegment(esLeafReader);
if (warmer != null) { if (warmer != null) {
final Engine.Searcher searcher = new Searcher("warmer", searcherFactory.newSearcher(esLeafReader, null)); final Engine.Searcher searcher = new Searcher("warmer", searcherFactory.newSearcher(esLeafReader, null));
final IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, searcher); warmer.warm(searcher, false);
warmer.warmNewReaders(context);
} }
} catch (Throwable t) { } catch (Throwable t) {
// Don't fail a merge if the warm-up failed // Don't fail a merge if the warm-up failed
@ -955,7 +953,7 @@ public class InternalEngine extends Engine {
/** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */ /** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */
final static class SearchFactory extends EngineSearcherFactory { final static class SearchFactory extends EngineSearcherFactory {
private final IndicesWarmer warmer; private final Engine.Warmer warmer;
private final ShardId shardId; private final ShardId shardId;
private final ESLogger logger; private final ESLogger logger;
private final AtomicBoolean isEngineClosed; private final AtomicBoolean isEngineClosed;
@ -1014,11 +1012,10 @@ public class InternalEngine extends Engine {
} }
if (newSearcher != null) { if (newSearcher != null) {
IndicesWarmer.WarmerContext context = new IndicesWarmer.WarmerContext(shardId, new Searcher("new_reader_warming", newSearcher)); warmer.warm(new Searcher("new_reader_warming", newSearcher), false);
warmer.warmNewReaders(context);
} }
assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass(); assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass();
warmer.warmTopReader(new IndicesWarmer.WarmerContext(shardId, new Searcher("top_reader_warming", searcher))); warmer.warm(new Searcher("top_reader_warming", searcher), true);
} catch (Throwable e) { } catch (Throwable e) {
if (isEngineClosed.get() == false) { if (isEngineClosed.get() == false) {
logger.warn("failed to prepare/warm", e); logger.warn("failed to prepare/warm", e);

View File

@ -25,7 +25,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.XGeoHashUtils; import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@ -82,7 +82,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
public static final boolean ENABLE_LATLON = false; public static final boolean ENABLE_LATLON = false;
public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = XGeoHashUtils.PRECISION; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false); public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
public static final Explicit<Boolean> COERCE = new Explicit(false, false); public static final Explicit<Boolean> COERCE = new Explicit(false, false);
@ -705,7 +705,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
} }
if (fieldType().isGeohashEnabled()) { if (fieldType().isGeohashEnabled()) {
if (geohash == null) { if (geohash == null) {
geohash = XGeoHashUtils.stringEncode(point.lon(), point.lat()); geohash = GeoHashUtils.stringEncode(point.lon(), point.lat());
} }
addGeohashField(context, geohash); addGeohashField(context, geohash);
} }

View File

@ -19,16 +19,14 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException; import java.io.IOException;
public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> { public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> {
private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
@Override @Override
public String[] names() { public String[] names() {
return new String[]{ FuzzyQueryBuilder.NAME }; return new String[]{ FuzzyQueryBuilder.NAME };
@ -68,7 +66,7 @@ public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> {
value = parser.objectBytes(); value = parser.objectBytes();
} else if ("boost".equals(currentFieldName)) { } else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue(); boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) { } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser); fuzziness = Fuzziness.parse(parser);
} else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) { } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
prefixLength = parser.intValue(); prefixLength = parser.intValue();

View File

@ -20,7 +20,7 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.XGeoHashUtils; import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
@ -131,7 +131,7 @@ public class GeohashCellQuery {
} }
public Builder point(double lat, double lon) { public Builder point(double lat, double lon) {
this.geohash = XGeoHashUtils.stringEncode(lon, lat); this.geohash = GeoHashUtils.stringEncode(lon, lat);
return this; return this;
} }
@ -205,7 +205,7 @@ public class GeohashCellQuery {
Query query; Query query;
if (neighbors) { if (neighbors) {
query = create(context, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8))); query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
} else { } else {
query = create(context, geoFieldType, geohash, null); query = create(context, geoFieldType, geohash, null);
} }

View File

@ -34,7 +34,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
@ -213,29 +212,20 @@ public class IndexQueryParserService extends AbstractIndexComponent {
/** /**
* Selectively parses a query from a top level query or query_binary json field from the specified source. * Selectively parses a query from a top level query or query_binary json field from the specified source.
*/ */
public ParsedQuery parseQuery(BytesReference source) { public ParsedQuery parseTopLevelQuery(BytesReference source) {
XContentParser parser = null; XContentParser parser = null;
try { try {
parser = XContentHelper.createParser(source); parser = XContentFactory.xContent(source).createParser(source);
ParsedQuery parsedQuery = null; QueryShardContext queryShardContext = cache.get();
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { queryShardContext.reset(parser);
if (token == XContentParser.Token.FIELD_NAME) { queryShardContext.parseFieldMatcher(parseFieldMatcher);
String fieldName = parser.currentName(); try {
if ("query".equals(fieldName)) { QueryBuilder<?> queryBuilder = queryShardContext.parseContext().parseTopLevelQueryBuilder();
parsedQuery = parse(parser); Query query = toQuery(queryBuilder, queryShardContext);
} else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) { return new ParsedQuery(query, queryShardContext.copyNamedQueries());
byte[] querySource = parser.binaryValue(); } finally {
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource); queryShardContext.reset(null);
parsedQuery = parse(qSourceParser);
} else {
throw new ParsingException(parser.getTokenLocation(), "request does not support [" + fieldName + "]");
}
}
} }
if (parsedQuery == null) {
throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
}
return parsedQuery;
} catch (ParsingException | QueryShardException e) { } catch (ParsingException | QueryShardException e) {
throw e; throw e;
} catch (Throwable e) { } catch (Throwable e) {

View File

@ -32,8 +32,8 @@ import java.util.Collection;
*/ */
public class IndicesQueryParser implements QueryParser { public class IndicesQueryParser implements QueryParser {
private static final ParseField QUERY_FIELD = new ParseField("query", "filter"); private static final ParseField QUERY_FIELD = new ParseField("query");
private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query", "no_match_filter"); private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query");
@Override @Override
public String[] names() { public String[] names() {

View File

@ -20,16 +20,15 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryInnerHits; import org.elasticsearch.index.query.support.QueryInnerHits;
import java.io.IOException; import java.io.IOException;
public class NestedQueryParser implements QueryParser<NestedQueryBuilder> { public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query");
private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
@Override @Override
@ -54,8 +53,6 @@ public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) { if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQueryBuilder(); query = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
query = parseContext.parseInnerQueryBuilder();
} else if ("inner_hits".equals(currentFieldName)) { } else if ("inner_hits".equals(currentFieldName)) {
queryInnerHits = new QueryInnerHits(parser); queryInnerHits = new QueryInnerHits(parser);
} else { } else {

View File

@ -1,111 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
/**
* A filter that simply wraps a query.
* @deprecated Useless now that queries and filters are merged: pass the
* query as a filter directly.
*/
//TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed
@Deprecated
public class QueryFilterBuilder extends AbstractQueryBuilder<QueryFilterBuilder> {
public static final String NAME = "query";
private final QueryBuilder queryBuilder;
static final QueryFilterBuilder PROTOTYPE = new QueryFilterBuilder(EmptyQueryBuilder.PROTOTYPE);
/**
* A filter that simply wraps a query.
*
* @param queryBuilder The query to wrap as a filter
*/
public QueryFilterBuilder(QueryBuilder queryBuilder) {
if (queryBuilder == null) {
throw new IllegalArgumentException("inner query cannot be null");
}
this.queryBuilder = queryBuilder;
}
/**
* @return the query builder that is wrapped by this {@link QueryFilterBuilder}
*/
public QueryBuilder innerQuery() {
return this.queryBuilder;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
queryBuilder.toXContent(builder, params);
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
// inner query builder can potentially be `null`, in that case we ignore it
Query innerQuery = this.queryBuilder.toQuery(context);
if (innerQuery == null) {
return null;
}
return new ConstantScoreQuery(innerQuery);
}
@Override
protected void setFinalBoost(Query query) {
//no-op this query doesn't support boost
}
@Override
protected int doHashCode() {
return Objects.hash(queryBuilder);
}
@Override
protected boolean doEquals(QueryFilterBuilder other) {
return Objects.equals(queryBuilder, other.queryBuilder);
}
@Override
protected QueryFilterBuilder doReadFrom(StreamInput in) throws IOException {
QueryBuilder innerQueryBuilder = in.readQuery();
return new QueryFilterBuilder(innerQueryBuilder);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeQuery(queryBuilder);
}
@Override
public String getWriteableName() {
return NAME;
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
@ -65,9 +66,47 @@ public class QueryParseContext {
} }
/** /**
* @return a new QueryBuilder based on the current state of the parser * Parses a top level query including the query element that wraps it
*/ */
public QueryBuilder parseInnerQueryBuilder() throws IOException { public QueryBuilder<?> parseTopLevelQueryBuilder() {
try {
QueryBuilder<?> queryBuilder = null;
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
String fieldName = parser.currentName();
if ("query".equals(fieldName)) {
queryBuilder = parseInnerQueryBuilder();
} else if ("query_binary".equals(fieldName) || "queryBinary".equals(fieldName)) {
byte[] querySource = parser.binaryValue();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
QueryParseContext queryParseContext = new QueryParseContext(indicesQueriesRegistry);
queryParseContext.reset(qSourceParser);
try {
queryParseContext.parseFieldMatcher(parseFieldMatcher);
queryBuilder = queryParseContext.parseInnerQueryBuilder();
} finally {
queryParseContext.reset(null);
}
} else {
throw new ParsingException(parser.getTokenLocation(), "request does not support [" + parser.currentName() + "]");
}
}
}
if (queryBuilder == null) {
throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
}
return queryBuilder;
} catch (ParsingException e) {
throw e;
} catch (Throwable e) {
throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse", e);
}
}
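For reference, the two request shapes this method accepts, with illustrative payloads (the base64 string encodes the inline example):
------------------------------
// both bodies are routed through parseTopLevelQueryBuilder()
String inline = "{ \"query\": { \"match_all\": {} } }";
String binary = "{ \"query_binary\": \"eyAibWF0Y2hfYWxsIjoge30gfQ==\" }";
// "query" is unwrapped and handed to parseInnerQueryBuilder(); "query_binary"
// is decoded and parsed with a fresh QueryParseContext. Any other field fails
// with "request does not support [...]"; a body with neither field fails with
// "Required query is missing".
------------------------------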
/**
* Parses a query excluding the query element that wraps it
*/
public QueryBuilder<?> parseInnerQueryBuilder() throws IOException {
// move to START object // move to START object
XContentParser.Token token; XContentParser.Token token;
if (parser.currentToken() != XContentParser.Token.START_OBJECT) { if (parser.currentToken() != XContentParser.Token.START_OBJECT) {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.Fuzziness;
@ -35,8 +34,6 @@ import java.util.Map;
*/ */
public class QueryStringQueryParser implements QueryParser { public class QueryStringQueryParser implements QueryParser {
private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("fuzzy_min_sim");
@Override @Override
public String[] names() { public String[] names() {
return new String[]{QueryStringQueryBuilder.NAME, Strings.toCamelCase(QueryStringQueryBuilder.NAME)}; return new String[]{QueryStringQueryBuilder.NAME, Strings.toCamelCase(QueryStringQueryBuilder.NAME)};
@ -134,7 +131,7 @@ public class QueryStringQueryParser implements QueryParser {
fuzzyRewrite = parser.textOrNull(); fuzzyRewrite = parser.textOrNull();
} else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) { } else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
phraseSlop = parser.intValue(); phraseSlop = parser.intValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) { } else if (parseContext.parseFieldMatcher().match(currentFieldName, Fuzziness.FIELD)) {
fuzziness = Fuzziness.parse(parser); fuzziness = Fuzziness.parse(parser);
} else if ("boost".equals(currentFieldName)) { } else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue(); boost = parser.floatValue();

View File

@ -70,7 +70,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e); rethrowUnlessLenient(e);
} }
} }
return super.simplify(bq.build()); return simplify(bq.build());
} }
/** /**
@ -93,7 +93,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e); rethrowUnlessLenient(e);
} }
} }
return super.simplify(bq.build()); return simplify(bq.build());
} }
@Override @Override
@ -111,7 +111,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
rethrowUnlessLenient(e); rethrowUnlessLenient(e);
} }
} }
return super.simplify(bq.build()); return simplify(bq.build());
} }
/** /**
@ -140,7 +140,19 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
return rethrowUnlessLenient(e); return rethrowUnlessLenient(e);
} }
} }
return super.simplify(bq.build()); return simplify(bq.build());
}
/**
* Override of Lucene's SimpleQueryParser that doesn't simplify for the 1-clause case.
*/
@Override
protected Query simplify(BooleanQuery bq) {
if (bq.clauses().isEmpty()) {
return null;
} else {
return bq;
}
} }
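The override matters because Lucene's default simplify(BooleanQuery) collapses a one-clause query down to the bare inner query. A small illustration with standard Lucene types and a hypothetical field/term:
------------------------------
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new TermQuery(new Term("f1", "t1")), BooleanClause.Occur.SHOULD);
BooleanQuery oneClause = builder.build();
// default Lucene simplify() would unwrap oneClause to the bare TermQuery;
// this override returns the BooleanQuery itself, so the minimum_should_match
// handling in SimpleQueryStringBuilder (next file) still sees a BooleanQuery.
------------------------------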
/** /**

View File

@ -20,6 +20,8 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@ -285,8 +287,20 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur()); sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());
Query query = sqp.parse(queryText); Query query = sqp.parse(queryText);
if (minimumShouldMatch != null && query instanceof BooleanQuery) { if (query instanceof BooleanQuery) {
query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); BooleanQuery booleanQuery = (BooleanQuery) query;
if (booleanQuery.clauses().size() > 1
&& ((booleanQuery.clauses().iterator().next().getQuery() instanceof BooleanQuery) == false)) {
// special case for one term query and more than one field: (f1:t1 f2:t1 f3:t1)
// we need to wrap this in additional BooleanQuery so minimum_should_match is applied correctly
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(booleanQuery, Occur.SHOULD));
booleanQuery = builder.build();
}
if (minimumShouldMatch != null) {
booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
}
query = booleanQuery;
} }
return query; return query;
} }
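A worked example of the special case handled above (field and term names illustrative):
------------------------------
// "t1" over fields f1,f2,f3 parses to three per-field SHOULD clauses:
//   (f1:t1 f2:t1 f3:t1)
// applying minimum_should_match directly would count field expansions, so the
// whole query is first wrapped as a single clause:
//   ((f1:t1 f2:t1 f3:t1))
// and minimum_should_match then counts terms. For "t1 t2" the top-level
// clauses are already one BooleanQuery per term, so no wrapping is needed.
------------------------------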

View File

@ -57,24 +57,18 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
static final TermsQueryBuilder PROTOTYPE = new TermsQueryBuilder("field", "value"); static final TermsQueryBuilder PROTOTYPE = new TermsQueryBuilder("field", "value");
public static final boolean DEFAULT_DISABLE_COORD = false;
private final String fieldName; private final String fieldName;
private final List<Object> values; private final List<Object> values;
@Deprecated
private String minimumShouldMatch;
@Deprecated
private boolean disableCoord = DEFAULT_DISABLE_COORD;
private final TermsLookup termsLookup; private final TermsLookup termsLookup;
public TermsQueryBuilder(String fieldName, TermsLookup termsLookup) { public TermsQueryBuilder(String fieldName, TermsLookup termsLookup) {
this(fieldName, null, null, DEFAULT_DISABLE_COORD, termsLookup); this(fieldName, null, termsLookup);
} }
/** /**
* constructor used internally for serialization of both value / termslookup variants * constructor used internally for serialization of both value / termslookup variants
*/ */
TermsQueryBuilder(String fieldName, List<Object> values, String minimumShouldMatch, boolean disableCoord, TermsLookup termsLookup) { TermsQueryBuilder(String fieldName, List<Object> values, TermsLookup termsLookup) {
if (Strings.isEmpty(fieldName)) { if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name cannot be null."); throw new IllegalArgumentException("field name cannot be null.");
} }
@ -86,8 +80,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
} }
this.fieldName = fieldName; this.fieldName = fieldName;
this.values = values; this.values = values;
this.disableCoord = disableCoord;
this.minimumShouldMatch = minimumShouldMatch;
this.termsLookup = termsLookup; this.termsLookup = termsLookup;
} }
@ -178,34 +170,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
return convertToStringListIfBytesRefList(this.values); return convertToStringListIfBytesRefList(this.values);
} }
/**
* Sets the minimum number of matches across the provided terms. Defaults to <tt>1</tt>.
* @deprecated use [bool] query instead
*/
@Deprecated
public TermsQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
this.minimumShouldMatch = minimumShouldMatch;
return this;
}
public String minimumShouldMatch() {
return this.minimumShouldMatch;
}
/**
* Disables <tt>Similarity#coord(int,int)</tt> in scoring. Defaults to <tt>false</tt>.
* @deprecated use [bool] query instead
*/
@Deprecated
public TermsQueryBuilder disableCoord(boolean disableCoord) {
this.disableCoord = disableCoord;
return this;
}
boolean disableCoord() {
return this.disableCoord;
}
public TermsLookup termsLookup() { public TermsLookup termsLookup() {
return this.termsLookup; return this.termsLookup;
} }
@ -252,12 +216,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
} else { } else {
builder.field(fieldName, convertToStringListIfBytesRefList(values)); builder.field(fieldName, convertToStringListIfBytesRefList(values));
} }
if (minimumShouldMatch != null) {
builder.field("minimum_should_match", minimumShouldMatch);
}
if (disableCoord != DEFAULT_DISABLE_COORD) {
builder.field("disable_coord", disableCoord);
}
printBoostAndQueryName(builder); printBoostAndQueryName(builder);
builder.endObject(); builder.endObject();
} }
@ -284,7 +242,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
if (terms == null || terms.isEmpty()) { if (terms == null || terms.isEmpty()) {
return Queries.newMatchNoDocsQuery(); return Queries.newMatchNoDocsQuery();
} }
return handleTermsQuery(terms, fieldName, context, minimumShouldMatch, disableCoord); return handleTermsQuery(terms, fieldName, context);
} }
private List<Object> fetch(TermsLookup termsLookup, Client client) { private List<Object> fetch(TermsLookup termsLookup, Client client) {
@ -300,7 +258,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
return terms; return terms;
} }
private static Query handleTermsQuery(List<Object> terms, String fieldName, QueryShardContext context, String minimumShouldMatch, boolean disableCoord) { private static Query handleTermsQuery(List<Object> terms, String fieldName, QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName); MappedFieldType fieldType = context.fieldMapper(fieldName);
String indexFieldName; String indexFieldName;
if (fieldType != null) { if (fieldType != null) {
@ -322,7 +280,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
} }
} else { } else {
BooleanQuery.Builder bq = new BooleanQuery.Builder(); BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.setDisableCoord(disableCoord);
for (Object term : terms) { for (Object term : terms) {
if (fieldType != null) { if (fieldType != null) {
bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD); bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD);
@ -330,7 +287,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD); bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD);
} }
} }
query = Queries.applyMinimumShouldMatch(bq.build(), minimumShouldMatch); query = bq.build();
} }
return query; return query;
} }
@ -344,9 +301,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
lookup = TermsLookup.readTermsLookupFrom(in); lookup = TermsLookup.readTermsLookupFrom(in);
} }
List<Object> values = (List<Object>) in.readGenericValue(); List<Object> values = (List<Object>) in.readGenericValue();
String minimumShouldMatch = in.readOptionalString(); return new TermsQueryBuilder(field, values, lookup);
boolean disableCoord = in.readBoolean();
return new TermsQueryBuilder(field, values, minimumShouldMatch, disableCoord, lookup);
} }
@Override @Override
@ -357,21 +312,17 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
termsLookup.writeTo(out); termsLookup.writeTo(out);
} }
out.writeGenericValue(values); out.writeGenericValue(values);
out.writeOptionalString(minimumShouldMatch);
out.writeBoolean(disableCoord);
} }
@Override @Override
protected int doHashCode() { protected int doHashCode() {
return Objects.hash(fieldName, values, minimumShouldMatch, disableCoord, termsLookup); return Objects.hash(fieldName, values, termsLookup);
} }
@Override @Override
protected boolean doEquals(TermsQueryBuilder other) { protected boolean doEquals(TermsQueryBuilder other) {
return Objects.equals(fieldName, other.fieldName) && return Objects.equals(fieldName, other.fieldName) &&
Objects.equals(values, other.values) && Objects.equals(values, other.values) &&
Objects.equals(minimumShouldMatch, other.minimumShouldMatch) &&
Objects.equals(disableCoord, other.disableCoord) &&
Objects.equals(termsLookup, other.termsLookup); Objects.equals(termsLookup, other.termsLookup);
} }
} }
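With minimum_should_match and disable_coord gone from the terms query, the bool query is the supported replacement, as the deprecation notices above state. A minimal migration sketch, assuming BoolQueryBuilder exposes the matching minimumShouldMatch and disableCoord setters (those names are not shown in this diff):

------------------------------
// hypothetical helper showing the bool-query equivalent of the removed options
import org.elasticsearch.index.query.BoolQueryBuilder;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

public class TermsMigration {
    public static BoolQueryBuilder termsWithOptions(String field, String minimumShouldMatch,
                                                    boolean disableCoord, Object... values) {
        BoolQueryBuilder bool = boolQuery();
        for (Object value : values) {
            bool.should(termQuery(field, value)); // one should clause per term
        }
        // the options removed from the terms query move onto the bool query itself
        return bool.minimumShouldMatch(minimumShouldMatch).disableCoord(disableCoord);
    }
}
------------------------------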
View File
@ -19,7 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.indices.cache.query.terms.TermsLookup; import org.elasticsearch.indices.cache.query.terms.TermsLookup;
@ -38,11 +37,6 @@ import java.util.List;
*/ */
public class TermsQueryParser implements QueryParser { public class TermsQueryParser implements QueryParser {
private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match", "minimum_should_match")
.withAllDeprecated("Use [bool] query instead");
private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord").withAllDeprecated("Use [bool] query instead");
private static final ParseField EXECUTION_FIELD = new ParseField("execution").withAllDeprecated("execution is deprecated and has no effect");
@Override @Override
public String[] names() { public String[] names() {
return new String[]{TermsQueryBuilder.NAME, "in"}; return new String[]{TermsQueryBuilder.NAME, "in"};
@ -54,8 +48,6 @@ public class TermsQueryParser implements QueryParser {
String fieldName = null; String fieldName = null;
List<Object> values = null; List<Object> values = null;
String minShouldMatch = null;
boolean disableCoord = TermsQueryBuilder.DEFAULT_DISABLE_COORD;
TermsLookup termsLookup = null; TermsLookup termsLookup = null;
String queryName = null; String queryName = null;
@ -78,17 +70,8 @@ public class TermsQueryParser implements QueryParser {
fieldName = currentFieldName; fieldName = currentFieldName;
termsLookup = TermsLookup.parseTermsLookup(parser); termsLookup = TermsLookup.parseTermsLookup(parser);
} else if (token.isValue()) { } else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, EXECUTION_FIELD)) { if ("boost".equals(currentFieldName)) {
// ignore
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SHOULD_MATCH_FIELD)) {
if (minShouldMatch != null) {
throw new IllegalArgumentException("[" + currentFieldName + "] is not allowed in a filter context for the [" + TermsQueryBuilder.NAME + "] query");
}
minShouldMatch = parser.textOrNull();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue(); boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISABLE_COORD_FIELD)) {
disableCoord = parser.booleanValue();
} else if ("_name".equals(currentFieldName)) { } else if ("_name".equals(currentFieldName)) {
queryName = parser.text(); queryName = parser.text();
} else { } else {
@ -100,7 +83,7 @@ public class TermsQueryParser implements QueryParser {
if (fieldName == null) { if (fieldName == null) {
throw new ParsingException(parser.getTokenLocation(), "terms query requires a field name, followed by array of terms or a document lookup specification"); throw new ParsingException(parser.getTokenLocation(), "terms query requires a field name, followed by array of terms or a document lookup specification");
} }
return new TermsQueryBuilder(fieldName, values, minShouldMatch, disableCoord, termsLookup) return new TermsQueryBuilder(fieldName, values, termsLookup)
.boost(boost) .boost(boost)
.queryName(queryName); .queryName(queryName);
} }
View File
@ -46,7 +46,6 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. "; static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. ";
public static final ParseField WEIGHT_FIELD = new ParseField("weight"); public static final ParseField WEIGHT_FIELD = new ParseField("weight");
private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query");
private final ScoreFunctionParserMapper functionParserMapper; private final ScoreFunctionParserMapper functionParserMapper;
@ -65,7 +64,6 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
XContentParser parser = parseContext.parser(); XContentParser parser = parseContext.parser();
QueryBuilder query = null; QueryBuilder query = null;
QueryBuilder filter = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST; float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null; String queryName = null;
@ -87,8 +85,6 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if ("query".equals(currentFieldName)) { } else if ("query".equals(currentFieldName)) {
query = parseContext.parseInnerQueryBuilder(); query = parseContext.parseInnerQueryBuilder();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
filter = parseContext.parseInnerQueryBuilder();
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) { } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text()); scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
} else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) { } else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) {
@ -132,15 +128,8 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
} }
} }
if (query == null && filter == null) { if (query == null) {
query = new MatchAllQueryBuilder(); query = new MatchAllQueryBuilder();
} else if (query == null && filter != null) {
query = new ConstantScoreQueryBuilder(filter);
} else if (query != null && filter != null) {
final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder();
boolQueryBuilder.must(query);
boolQueryBuilder.filter(filter);
query = boolQueryBuilder;
} }
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(query, FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(query,
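The removed branches show exactly how the deprecated top-level "filter" used to be folded into the query; callers that relied on it now do the wrapping themselves. A sketch of that same folding, built only from the combinations deleted above:

------------------------------
// mirrors the removed parser logic: filter-only becomes constant_score,
// query-plus-filter becomes bool { must, filter }
import org.elasticsearch.index.query.QueryBuilder;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

public class FunctionScoreFilterMigration {
    public static QueryBuilder combine(QueryBuilder query, QueryBuilder filter) {
        if (query == null && filter == null) {
            return matchAllQuery();
        } else if (query == null) {
            return constantScoreQuery(filter);          // filter only: no scoring
        } else if (filter == null) {
            return query;
        }
        return boolQuery().must(query).filter(filter);  // both: filter does not affect the score
    }
}
------------------------------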
View File
@ -55,11 +55,11 @@ import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexServicesProvider; import org.elasticsearch.index.IndexServicesProvider;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache; import org.elasticsearch.index.cache.bitset.ShardBitsetFilterCache;
import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.cache.request.ShardRequestCache; import org.elasticsearch.index.cache.request.ShardRequestCache;
@ -147,6 +147,7 @@ public class IndexShard extends AbstractIndexShardComponent {
private final MergePolicyConfig mergePolicyConfig; private final MergePolicyConfig mergePolicyConfig;
private final IndicesQueryCache indicesQueryCache; private final IndicesQueryCache indicesQueryCache;
private final IndexEventListener indexEventListener; private final IndexEventListener indexEventListener;
private final IndexSettings idxSettings;
private TimeValue refreshInterval; private TimeValue refreshInterval;
@ -195,8 +196,9 @@ public class IndexShard extends AbstractIndexShardComponent {
private final IndexingMemoryController indexingMemoryController; private final IndexingMemoryController indexingMemoryController;
@Inject @Inject
public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) { public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexSearcherWrapper indexSearcherWrapper, IndexServicesProvider provider) {
super(shardId, indexSettings); super(shardId, indexSettings);
this.idxSettings = indexSettings;
this.codecService = provider.getCodecService(); this.codecService = provider.getCodecService();
this.warmer = provider.getWarmer(); this.warmer = provider.getWarmer();
this.deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); this.deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
@ -234,7 +236,7 @@ public class IndexShard extends AbstractIndexShardComponent {
final QueryCachingPolicy cachingPolicy; final QueryCachingPolicy cachingPolicy;
// the query cache is a node-level thing, however we want the most popular filters // the query cache is a node-level thing, however we want the most popular filters
// to be computed on a per-shard basis // to be computed on a per-shard basis
if (this.indexSettings.getAsBoolean(IndexCacheModule.QUERY_CACHE_EVERYTHING, false)) { if (this.indexSettings.getAsBoolean(IndexModule.QUERY_CACHE_EVERYTHING, false)) {
cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE;
} else { } else {
cachingPolicy = new UsageTrackingQueryCachingPolicy(); cachingPolicy = new UsageTrackingQueryCachingPolicy();
@ -247,7 +249,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.disableFlush = this.indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false); this.disableFlush = this.indexSettings.getAsBoolean(INDEX_TRANSLOG_DISABLE_FLUSH, false);
this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId);
this.searcherWrapper = provider.getIndexSearcherWrapper(); this.searcherWrapper = indexSearcherWrapper;
this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, provider.getQueryParserService(), indexingService, mapperService, indexFieldDataService); this.percolatorQueriesRegistry = new PercolatorQueriesRegistry(shardId, indexSettings, provider.getQueryParserService(), indexingService, mapperService, indexFieldDataService);
if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) { if (mapperService.hasMapping(PercolatorService.TYPE_NAME)) {
percolatorQueriesRegistry.enableRealTimePercolator(); percolatorQueriesRegistry.enableRealTimePercolator();
@ -261,6 +263,10 @@ public class IndexShard extends AbstractIndexShardComponent {
return this.store; return this.store;
} }
public IndexSettings getIndexSettings() {
return idxSettings;
}
/** returns true if this shard supports indexing (i.e., write) operations. */ /** returns true if this shard supports indexing (i.e., write) operations. */
public boolean canIndex() { public boolean canIndex() {
return true; return true;
@ -678,7 +684,7 @@ public class IndexShard extends AbstractIndexShardComponent {
luceneVersion = segment.getVersion(); luceneVersion = segment.getVersion();
} }
} }
return luceneVersion == null ? Version.indexCreated(indexSettings).luceneVersion : luceneVersion; return luceneVersion == null ? idxSettings.getIndexVersionCreated().luceneVersion : luceneVersion;
} }
/** /**
@ -1452,8 +1458,9 @@ public class IndexShard extends AbstractIndexShardComponent {
recoveryState.getTranslog().incrementRecoveredOperations(); recoveryState.getTranslog().incrementRecoveredOperations();
} }
}; };
final Engine.Warmer engineWarmer = (searcher, toLevel) -> warmer.warm(searcher, this, idxSettings, toLevel);
return new EngineConfig(shardId, return new EngineConfig(shardId,
threadPool, indexingService, indexSettings, warmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig, threadPool, indexingService, indexSettings, engineWarmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, indexingMemoryController.getInactiveTime()); mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, indexingMemoryController.getInactiveTime());
} }
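The new engineWarmer lambda implies Engine.Warmer is (or is now usable as) a functional interface over an Engine.Searcher and a top-reader flag. A hedged sketch of a standalone warmer with that shape; everything beyond the lambda's signature is an assumption:

------------------------------
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.engine.Engine;

public class WarmerSketch {
    // touches every segment of the searcher being warmed; real listeners do this
    // kind of work through IndicesWarmer (see the IndicesWarmer changes below)
    static Engine.Warmer leafTouchingWarmer() {
        return (searcher, isTopReader) -> {
            for (LeafReaderContext ctx : searcher.reader().leaves()) {
                ctx.reader().maxDoc(); // stand-in for real per-segment warm-up work
            }
        };
    }
}
------------------------------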
View File
@ -37,8 +37,8 @@ import org.elasticsearch.index.translog.TranslogStats;
*/ */
public final class ShadowIndexShard extends IndexShard { public final class ShadowIndexShard extends IndexShard {
public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexServicesProvider provider) throws IOException { public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexSearcherWrapper wrapper, IndexServicesProvider provider) throws IOException {
super(shardId, indexSettings, path, store, provider); super(shardId, indexSettings, path, store, wrapper, provider);
} }
/** /**
View File
@ -24,11 +24,9 @@ import org.elasticsearch.action.update.UpdateHelper;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.ShapesAvailability;
import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.index.query.*; import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser;
import org.elasticsearch.index.query.MoreLikeThisQueryParser;
import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.analysis.HunspellService;
import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.analysis.IndicesAnalysisService;
@ -105,7 +103,6 @@ public class IndicesModule extends AbstractModule {
registerQueryParser(GeoBoundingBoxQueryParser.class); registerQueryParser(GeoBoundingBoxQueryParser.class);
registerQueryParser(GeohashCellQuery.Parser.class); registerQueryParser(GeohashCellQuery.Parser.class);
registerQueryParser(GeoPolygonQueryParser.class); registerQueryParser(GeoPolygonQueryParser.class);
registerQueryParser(QueryFilterParser.class);
registerQueryParser(ExistsQueryParser.class); registerQueryParser(ExistsQueryParser.class);
registerQueryParser(MissingQueryParser.class); registerQueryParser(MissingQueryParser.class);
registerQueryParser(MatchNoneQueryParser.class); registerQueryParser(MatchNoneQueryParser.class);
View File
@ -44,7 +44,6 @@ import org.elasticsearch.index.*;
import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisModule;
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.IndexCacheModule;
import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.get.GetStats;
@ -61,6 +60,7 @@ import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.analysis.IndicesAnalysisService;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.PluginsService;
@ -94,6 +94,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
private final PluginsService pluginsService; private final PluginsService pluginsService;
private final NodeEnvironment nodeEnv; private final NodeEnvironment nodeEnv;
private final TimeValue shardsClosedTimeout; private final TimeValue shardsClosedTimeout;
private final IndicesWarmer indicesWarmer;
private final IndicesQueryCache indicesQueryCache;
private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap(); private volatile Map<String, IndexServiceInjectorPair> indices = emptyMap();
@ -121,12 +123,14 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
private final IndexStoreConfig indexStoreConfig; private final IndexStoreConfig indexStoreConfig;
@Inject @Inject
public IndicesService(Settings settings, IndicesAnalysisService indicesAnalysisService, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService) { public IndicesService(Settings settings, IndicesAnalysisService indicesAnalysisService, Injector injector, PluginsService pluginsService, NodeEnvironment nodeEnv, NodeSettingsService nodeSettingsService, IndicesQueryCache indicesQueryCache, IndicesWarmer indicesWarmer) {
super(settings); super(settings);
this.indicesAnalysisService = indicesAnalysisService; this.indicesAnalysisService = indicesAnalysisService;
this.injector = injector; this.injector = injector;
this.pluginsService = pluginsService; this.pluginsService = pluginsService;
this.nodeEnv = nodeEnv; this.nodeEnv = nodeEnv;
this.indicesWarmer = indicesWarmer;
this.indicesQueryCache = indicesQueryCache;
this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
this.indexStoreConfig = new IndexStoreConfig(settings); this.indexStoreConfig = new IndexStoreConfig(settings);
nodeSettingsService.addListener(indexStoreConfig); nodeSettingsService.addListener(indexStoreConfig);
@ -306,13 +310,12 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
for (Module pluginModule : pluginsService.indexModules(idxSettings.getSettings())) { for (Module pluginModule : pluginsService.indexModules(idxSettings.getSettings())) {
modules.add(pluginModule); modules.add(pluginModule);
} }
final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig); final IndexModule indexModule = new IndexModule(idxSettings, indexStoreConfig, indicesQueryCache, indicesWarmer);
for (IndexEventListener listener : builtInListeners) { for (IndexEventListener listener : builtInListeners) {
indexModule.addIndexEventListener(listener); indexModule.addIndexEventListener(listener);
} }
indexModule.addIndexEventListener(oldShardsStats); indexModule.addIndexEventListener(oldShardsStats);
modules.add(new AnalysisModule(idxSettings.getSettings(), indicesAnalysisService)); modules.add(new AnalysisModule(idxSettings.getSettings(), indicesAnalysisService));
modules.add(new IndexCacheModule(idxSettings.getSettings()));
modules.add(indexModule); modules.add(indexModule);
pluginsService.processModules(modules); pluginsService.processModules(modules);
final IndexEventListener listener = indexModule.freeze(); final IndexEventListener listener = indexModule.freeze();
View File
@ -19,23 +19,21 @@
package org.elasticsearch.indices; package org.elasticsearch.indices;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
/** /**
@ -46,68 +44,46 @@ public final class IndicesWarmer extends AbstractComponent {
private final ThreadPool threadPool; private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IndicesService indicesService;
private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>(); private final CopyOnWriteArrayList<Listener> listeners = new CopyOnWriteArrayList<>();
@Inject @Inject
public IndicesWarmer(Settings settings, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService) { public IndicesWarmer(Settings settings, ThreadPool threadPool) {
super(settings); super(settings);
this.threadPool = threadPool; this.threadPool = threadPool;
this.clusterService = clusterService;
this.indicesService = indicesService;
} }
public void addListener(Listener listener) { public void addListener(Listener listener) {
listeners.add(listener); listeners.add(listener);
} }
public void removeListener(Listener listener) { public void removeListener(Listener listener) {
listeners.remove(listener); listeners.remove(listener);
} }
public void warmNewReaders(final WarmerContext context) { public void warm(Engine.Searcher searcher, IndexShard shard, IndexSettings settings, boolean isTopReader) {
warmInternal(context, false); if (shard.state() == IndexShardState.CLOSED) {
}
public void warmTopReader(WarmerContext context) {
warmInternal(context, true);
}
private void warmInternal(final WarmerContext context, boolean topReader) {
final IndexMetaData indexMetaData = clusterService.state().metaData().index(context.shardId().index().name());
if (indexMetaData == null) {
return; return;
} }
if (!indexMetaData.getSettings().getAsBoolean(INDEX_WARMER_ENABLED, settings.getAsBoolean(INDEX_WARMER_ENABLED, true))) { final IndexMetaData indexMetaData = settings.getIndexMetaData();
return; final Settings indexSettings = settings.getSettings();
} if (!indexSettings.getAsBoolean(INDEX_WARMER_ENABLED, settings.getNodeSettings().getAsBoolean(INDEX_WARMER_ENABLED, true))) {
IndexService indexService = indicesService.indexService(context.shardId().index().name());
if (indexService == null) {
return;
}
final IndexShard indexShard = indexService.getShardOrNull(context.shardId().id());
if (indexShard == null) {
return; return;
} }
if (logger.isTraceEnabled()) { if (logger.isTraceEnabled()) {
if (topReader) { if (isTopReader) {
logger.trace("[{}][{}] top warming [{}]", context.shardId().index().name(), context.shardId().id(), context); logger.trace("{} top warming [{}]", shard.shardId(), searcher.reader());
} else { } else {
logger.trace("[{}][{}] warming [{}]", context.shardId().index().name(), context.shardId().id(), context); logger.trace("{} warming [{}]", shard.shardId(), searcher.reader());
} }
} }
indexShard.warmerService().onPreWarm(); shard.warmerService().onPreWarm();
long time = System.nanoTime(); long time = System.nanoTime();
final List<TerminationHandle> terminationHandles = new ArrayList<>(); final List<TerminationHandle> terminationHandles = new ArrayList<>();
// get a handle on pending tasks // get a handle on pending tasks
for (final Listener listener : listeners) { for (final Listener listener : listeners) {
if (topReader) { if (isTopReader) {
terminationHandles.add(listener.warmTopReader(indexShard, indexMetaData, context, threadPool)); terminationHandles.add(listener.warmTopReader(shard, searcher));
} else { } else {
terminationHandles.add(listener.warmNewReaders(indexShard, indexMetaData, context, threadPool)); terminationHandles.add(listener.warmNewReaders(shard, searcher));
} }
} }
// wait for termination // wait for termination
@ -116,7 +92,7 @@ public final class IndicesWarmer extends AbstractComponent {
terminationHandle.awaitTermination(); terminationHandle.awaitTermination();
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
if (topReader) { if (isTopReader) {
logger.warn("top warming has been interrupted", e); logger.warn("top warming has been interrupted", e);
} else { } else {
logger.warn("warming has been interrupted", e); logger.warn("warming has been interrupted", e);
@ -125,69 +101,36 @@ public final class IndicesWarmer extends AbstractComponent {
} }
} }
long took = System.nanoTime() - time; long took = System.nanoTime() - time;
indexShard.warmerService().onPostWarm(took); shard.warmerService().onPostWarm(took);
if (indexShard.warmerService().logger().isTraceEnabled()) { if (shard.warmerService().logger().isTraceEnabled()) {
if (topReader) { if (isTopReader) {
indexShard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS)); shard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
} else { } else {
indexShard.warmerService().logger().trace("warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS)); shard.warmerService().logger().trace("warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
} }
} }
} }
/**
* Returns an executor for async warmer tasks
*/
public Executor getExecutor() {
return threadPool.executor(ThreadPool.Names.WARMER);
}
/** A handle on the execution of warm-up action. */ /** A handle on the execution of warm-up action. */
public interface TerminationHandle { public interface TerminationHandle {
public static TerminationHandle NO_WAIT = new TerminationHandle() { TerminationHandle NO_WAIT = () -> {};
@Override
public void awaitTermination() {}
};
/** Wait until execution of the warm-up action completes. */ /** Wait until execution of the warm-up action completes. */
void awaitTermination() throws InterruptedException; void awaitTermination() throws InterruptedException;
} }
public static abstract class Listener { public interface Listener {
public String executor() {
return ThreadPool.Names.WARMER;
}
/** Queue tasks to warm up the given segments and return handles that allow callers to wait for those warm-up tasks to terminate. */ /** Queue tasks to warm up the given segments and return handles that allow callers to wait for those warm-up tasks to terminate. */
public abstract TerminationHandle warmNewReaders(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool); TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher);
public abstract TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool); TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher);
} }
public static final class WarmerContext {
private final ShardId shardId;
private final Engine.Searcher searcher;
public WarmerContext(ShardId shardId, Engine.Searcher searcher) {
this.shardId = shardId;
this.searcher = searcher;
}
public ShardId shardId() {
return shardId;
}
/** Return a searcher instance that only wraps the segments to warm. */
public Engine.Searcher searcher() {
return searcher;
}
public IndexReader reader() {
return searcher.reader();
}
public DirectoryReader getDirectoryReader() {
return searcher.getDirectoryReader();
}
@Override
public String toString() {
return "WarmerContext: " + searcher.reader();
}
}
} }
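Listener is now a plain interface that receives the shard and the searcher directly, replacing the old WarmerContext plus thread-pool plumbing, and asynchronous work runs on the executor the warmer exposes. A minimal sketch of the new contract; the listener class itself is hypothetical:

------------------------------
import java.util.concurrent.CountDownLatch;

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesWarmer;

public class ExampleWarmListener implements IndicesWarmer.Listener {
    private final IndicesWarmer indicesWarmer;

    public ExampleWarmListener(IndicesWarmer indicesWarmer) {
        this.indicesWarmer = indicesWarmer;
    }

    @Override
    public IndicesWarmer.TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
        final CountDownLatch latch = new CountDownLatch(1);
        indicesWarmer.getExecutor().execute(() -> {     // WARMER thread pool under the hood
            try {
                searcher.reader().leaves().size();      // stand-in for per-segment warm-up
            } finally {
                latch.countDown();
            }
        });
        return latch::await;                            // warm() blocks on this handle
    }

    @Override
    public IndicesWarmer.TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
        return IndicesWarmer.TerminationHandle.NO_WAIT; // nothing to do for top readers
    }
}
------------------------------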
View File
@ -21,6 +21,7 @@ package org.elasticsearch.indices.cache.query;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.LRUQueryCache;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -256,6 +257,12 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
shardKeyMap.add(context.reader()); shardKeyMap.add(context.reader());
return in.scorer(context); return in.scorer(context);
} }
@Override
public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
shardKeyMap.add(context.reader());
return in.bulkScorer(context);
}
} }
/** Clear all entries that belong to the given index. */ /** Clear all entries that belong to the given index. */
View File
@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
import org.elasticsearch.cluster.action.shard.NoOpShardStateActionListener;
import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
@ -76,6 +77,8 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
private final NodeIndexDeletedAction nodeIndexDeletedAction; private final NodeIndexDeletedAction nodeIndexDeletedAction;
private final NodeMappingRefreshAction nodeMappingRefreshAction; private final NodeMappingRefreshAction nodeMappingRefreshAction;
private static final ShardStateAction.Listener SHARD_STATE_ACTION_LISTENER = new NoOpShardStateActionListener();
// a map of mapping types we have seen per index due to cluster state // a map of mapping types we have seen per index due to cluster state
// we need this so we won't remove types automatically created as part of the indexing process // we need this so we won't remove types automatically created as part of the indexing process
private final ConcurrentMap<Tuple<String, String>, Boolean> seenMappings = ConcurrentCollections.newConcurrentMap(); private final ConcurrentMap<Tuple<String, String>, Boolean> seenMappings = ConcurrentCollections.newConcurrentMap();
@ -473,7 +476,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
if (failedShards.containsKey(shardRouting.shardId())) { if (failedShards.containsKey(shardRouting.shardId())) {
if (nodes.masterNode() != null) { if (nodes.masterNode() != null) {
shardStateAction.resendShardFailed(shardRouting, indexMetaData.getIndexUUID(), nodes.masterNode(), shardStateAction.resendShardFailed(shardRouting, indexMetaData.getIndexUUID(), nodes.masterNode(),
"master " + nodes.masterNode() + " marked shard as started, but shard has previous failed. resending shard failure.", null); "master " + nodes.masterNode() + " marked shard as started, but shard has previous failed. resending shard failure.", null, SHARD_STATE_ACTION_LISTENER);
} }
} else { } else {
// the master thinks we are started, but we don't have this shard at all, mark it as failed // the master thinks we are started, but we don't have this shard at all, mark it as failed
@ -606,7 +609,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
if (failedShards.containsKey(shardRouting.shardId())) { if (failedShards.containsKey(shardRouting.shardId())) {
if (nodes.masterNode() != null) { if (nodes.masterNode() != null) {
shardStateAction.resendShardFailed(shardRouting, indexMetaData.getIndexUUID(), nodes.masterNode(), shardStateAction.resendShardFailed(shardRouting, indexMetaData.getIndexUUID(), nodes.masterNode(),
"master " + nodes.masterNode() + " marked shard as initializing, but shard is marked as failed, resend shard failure", null); "master " + nodes.masterNode() + " marked shard as initializing, but shard is marked as failed, resend shard failure", null, SHARD_STATE_ACTION_LISTENER);
} }
return; return;
} }
@ -802,7 +805,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
try { try {
logger.warn("[{}] marking and sending shard failed due to [{}]", failure, shardRouting.shardId(), message); logger.warn("[{}] marking and sending shard failed due to [{}]", failure, shardRouting.shardId(), message);
failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version())); failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
shardStateAction.shardFailed(shardRouting, indexUUID, message, failure); shardStateAction.shardFailed(shardRouting, indexUUID, message, failure, SHARD_STATE_ACTION_LISTENER);
} catch (Throwable e1) { } catch (Throwable e1) {
logger.warn("[{}][{}] failed to mark shard as failed (because of [{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message); logger.warn("[{}][{}] failed to mark shard as failed (because of [{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message);
} }
View File
@ -182,7 +182,7 @@ public class Node implements Releasable {
modules.add(new HttpServerModule(settings)); modules.add(new HttpServerModule(settings));
} }
modules.add(new IndicesModule()); modules.add(new IndicesModule());
modules.add(new SearchModule(settings)); modules.add(new SearchModule());
modules.add(new ActionModule(false)); modules.add(new ActionModule(false));
modules.add(new MonitorModule(settings)); modules.add(new MonitorModule(settings));
modules.add(new GatewayModule(settings)); modules.add(new GatewayModule(settings));
View File
@ -152,13 +152,14 @@ public class NodeService extends AbstractComponent {
transportService.stats(), transportService.stats(),
httpServer == null ? null : httpServer.stats(), httpServer == null ? null : httpServer.stats(),
circuitBreakerService.stats(), circuitBreakerService.stats(),
scriptService.stats() scriptService.stats(),
discovery.stats()
); );
} }
public NodeStats stats(CommonStatsFlags indices, boolean os, boolean process, boolean jvm, boolean threadPool, public NodeStats stats(CommonStatsFlags indices, boolean os, boolean process, boolean jvm, boolean threadPool,
boolean fs, boolean transport, boolean http, boolean circuitBreaker, boolean fs, boolean transport, boolean http, boolean circuitBreaker,
boolean script) { boolean script, boolean discoveryStats) {
// for indices stats we want to include previous allocated shards stats as well (it will // for indices stats we want to include previous allocated shards stats as well (it will
// only be applied to the sensible ones to use, like refresh/merge/flush/indexing stats) // only be applied to the sensible ones to use, like refresh/merge/flush/indexing stats)
return new NodeStats(discovery.localNode(), System.currentTimeMillis(), return new NodeStats(discovery.localNode(), System.currentTimeMillis(),
@ -171,7 +172,8 @@ public class NodeService extends AbstractComponent {
transport ? transportService.stats() : null, transport ? transportService.stats() : null,
http ? (httpServer == null ? null : httpServer.stats()) : null, http ? (httpServer == null ? null : httpServer.stats()) : null,
circuitBreaker ? circuitBreakerService.stats() : null, circuitBreaker ? circuitBreakerService.stats() : null,
script ? scriptService.stats() : null script ? scriptService.stats() : null,
discoveryStats ? discovery.stats() : null
); );
} }
} }
View File
@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Settings;
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.net.URL; import java.net.URL;
import java.net.URLClassLoader; import java.net.URLClassLoader;
@ -192,8 +193,12 @@ public class PluginsService extends AbstractComponent {
if (reference.moduleClass.isAssignableFrom(module.getClass())) { if (reference.moduleClass.isAssignableFrom(module.getClass())) {
try { try {
reference.onModuleMethod.invoke(plugin.v2(), module); reference.onModuleMethod.invoke(plugin.v2(), module);
} catch (IllegalAccessException | InvocationTargetException e) {
logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v2().name());
throw new ElasticsearchException("failed to invoke onModule", e);
} catch (Exception e) { } catch (Exception e) {
logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v2().name()); logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v2().name());
throw e;
} }
} }
} }
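Reflection failures in onModule are now rethrown (wrapped in ElasticsearchException for access and invocation errors) instead of only logged, so a broken plugin fails the node loudly. For context, a sketch of the reflective hook being invoked; the plugin class is hypothetical:

------------------------------
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchModule;

public class ExamplePlugin extends Plugin {
    @Override
    public String name() {
        return "example-plugin";
    }

    @Override
    public String description() {
        return "demonstrates the reflective onModule hook";
    }

    // discovered by PluginsService and called via onModuleMethod.invoke(...);
    // an exception thrown here now aborts startup instead of being swallowed
    public void onModule(SearchModule module) {
        // e.g. module.registerHighlighter(...) for a custom highlighter
    }
}
------------------------------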
View File
@ -77,6 +77,7 @@ public class RestNodesStatsAction extends BaseRestHandler {
nodesStatsRequest.process(metrics.contains("process")); nodesStatsRequest.process(metrics.contains("process"));
nodesStatsRequest.breaker(metrics.contains("breaker")); nodesStatsRequest.breaker(metrics.contains("breaker"));
nodesStatsRequest.script(metrics.contains("script")); nodesStatsRequest.script(metrics.contains("script"));
nodesStatsRequest.discovery(metrics.contains("discovery"));
// check for index specific metrics // check for index specific metrics
if (metrics.contains("indices")) { if (metrics.contains("indices")) {
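The new discovery metric is wired through NodeService.stats above and surfaced here in the nodes-stats metric list. A hedged client-side sketch: discovery(boolean) is taken from this handler, while clear() is assumed to switch off the default metric set:

------------------------------
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;

public class DiscoveryStatsExample {
    // roughly what GET /_nodes/stats/discovery builds on the transport layer
    public static NodesStatsRequest discoveryOnly() {
        NodesStatsRequest request = new NodesStatsRequest();
        request.clear();          // assumed: drop the default metrics
        request.discovery(true);  // matches nodesStatsRequest.discovery(...) above
        return request;
    }
}
------------------------------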
View File
@ -28,7 +28,6 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestController;
@ -71,9 +70,7 @@ public class RestCountAction extends AbstractCatAction {
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0);
countRequest.source(searchSourceBuilder); countRequest.source(searchSourceBuilder);
if (source != null) { if (source != null) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); searchSourceBuilder.query(RestActions.getQueryContent(new BytesArray(source), indicesQueriesRegistry, parseFieldMatcher));
context.parseFieldMatcher(parseFieldMatcher);
searchSourceBuilder.query(RestActions.getQueryContent(new BytesArray(source), context));
} else { } else {
QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request); QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) { if (queryBuilder != null) {
View File
@ -29,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.*; import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestActions;
@ -68,9 +67,7 @@ public class RestCountAction extends BaseRestHandler {
countRequest.source(searchSourceBuilder); countRequest.source(searchSourceBuilder);
if (RestActions.hasBodyContent(request)) { if (RestActions.hasBodyContent(request)) {
BytesReference restContent = RestActions.getRestContent(request); BytesReference restContent = RestActions.getRestContent(request);
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); searchSourceBuilder.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher));
context.parseFieldMatcher(parseFieldMatcher);
searchSourceBuilder.query(RestActions.getQueryContent(restContent, context));
} else { } else {
QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request); QueryBuilder<?> queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) { if (queryBuilder != null) {
View File
@ -22,17 +22,16 @@ package org.elasticsearch.rest.action.explain;
import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.rest.*; import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestBuilderListener; import org.elasticsearch.rest.action.support.RestBuilderListener;
@ -50,9 +49,12 @@ import static org.elasticsearch.rest.RestStatus.OK;
*/ */
public class RestExplainAction extends BaseRestHandler { public class RestExplainAction extends BaseRestHandler {
private final IndicesQueriesRegistry indicesQueriesRegistry;
@Inject @Inject
public RestExplainAction(Settings settings, RestController controller, Client client) { public RestExplainAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry) {
super(settings, controller, client); super(settings, controller, client);
this.indicesQueriesRegistry = indicesQueriesRegistry;
controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this); controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this); controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this);
} }
@ -65,22 +67,11 @@ public class RestExplainAction extends BaseRestHandler {
explainRequest.preference(request.param("preference")); explainRequest.preference(request.param("preference"));
String queryString = request.param("q"); String queryString = request.param("q");
if (RestActions.hasBodyContent(request)) { if (RestActions.hasBodyContent(request)) {
explainRequest.source(RestActions.getRestContent(request)); BytesReference restContent = RestActions.getRestContent(request);
explainRequest.query(RestActions.getQueryContent(restContent, indicesQueriesRegistry, parseFieldMatcher));
} else if (queryString != null) { } else if (queryString != null) {
QueryStringQueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery(queryString); QueryBuilder<?> query = RestActions.urlParamsToQueryBuilder(request);
queryStringBuilder.defaultField(request.param("df")); explainRequest.query(query);
queryStringBuilder.analyzer(request.param("analyzer"));
queryStringBuilder.analyzeWildcard(request.paramAsBoolean("analyze_wildcard", false));
queryStringBuilder.lowercaseExpandedTerms(request.paramAsBoolean("lowercase_expanded_terms", true));
queryStringBuilder.lenient(request.paramAsBoolean("lenient", null));
String defaultOperator = request.param("default_operator");
if (defaultOperator != null) {
queryStringBuilder.defaultOperator(Operator.fromString(defaultOperator));
}
QuerySourceBuilder querySourceBuilder = new QuerySourceBuilder();
querySourceBuilder.setQuery(queryStringBuilder);
explainRequest.source(querySourceBuilder);
} }
String sField = request.param("fields"); String sField = request.param("fields");
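Explain requests now carry a parsed QueryBuilder instead of raw source bytes, and URL parameters go through the shared urlParamsToQueryBuilder helper. A hedged client-side sketch mirroring the handler; the index, type, id and field values are placeholders:

------------------------------
import org.elasticsearch.action.explain.ExplainRequest;

import static org.elasticsearch.index.query.QueryBuilders.termQuery;

public class ExplainRequestExample {
    public static ExplainRequest explainDoc() {
        ExplainRequest request = new ExplainRequest("index", "type", "1");
        request.query(termQuery("user", "kimchy")); // query(QueryBuilder), per the handler above
        return request;
    }
}
------------------------------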
View File
@ -27,17 +27,8 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.*;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
@ -142,14 +133,12 @@ public class RestActions {
return content; return content;
} }
public static QueryBuilder<?> getQueryContent(BytesReference source, QueryParseContext context) { public static QueryBuilder<?> getQueryContent(BytesReference source, IndicesQueriesRegistry indicesQueriesRegistry, ParseFieldMatcher parseFieldMatcher) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
try (XContentParser requestParser = XContentFactory.xContent(source).createParser(source)) { try (XContentParser requestParser = XContentFactory.xContent(source).createParser(source)) {
// Save the parseFieldMatcher because its about to be trashed in the
// QueryParseContext
ParseFieldMatcher parseFieldMatcher = context.parseFieldMatcher();
context.reset(requestParser); context.reset(requestParser);
context.parseFieldMatcher(parseFieldMatcher); context.parseFieldMatcher(parseFieldMatcher);
return context.parseInnerQueryBuilder(); return context.parseTopLevelQueryBuilder();
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("failed to parse source", e); throw new ElasticsearchException("failed to parse source", e);
} finally { } finally {
View File
@ -156,7 +156,6 @@ import java.util.Set;
*/ */
public class SearchModule extends AbstractModule { public class SearchModule extends AbstractModule {
private final Settings settings;
private final Set<Class<? extends Aggregator.Parser>> aggParsers = new HashSet<>(); private final Set<Class<? extends Aggregator.Parser>> aggParsers = new HashSet<>();
private final Set<Class<? extends PipelineAggregator.Parser>> pipelineAggParsers = new HashSet<>(); private final Set<Class<? extends PipelineAggregator.Parser>> pipelineAggParsers = new HashSet<>();
private final Highlighters highlighters = new Highlighters(); private final Highlighters highlighters = new Highlighters();
@ -169,19 +168,6 @@ public class SearchModule extends AbstractModule {
// pkg private so tests can mock // pkg private so tests can mock
Class<? extends SearchService> searchServiceImpl = SearchService.class; Class<? extends SearchService> searchServiceImpl = SearchService.class;
public SearchModule(Settings settings) {
this.settings = settings;
}
// TODO document public API
public void registerStream(SignificanceHeuristicStreams.Stream stream) {
SignificanceHeuristicStreams.registerStream(stream);
}
public void registerStream(MovAvgModelStreams.Stream stream) {
MovAvgModelStreams.registerStream(stream);
}
public void registerHighlighter(String key, Class<? extends Highlighter> clazz) { public void registerHighlighter(String key, Class<? extends Highlighter> clazz) {
highlighters.registerExtension(key, clazz); highlighters.registerExtension(key, clazz);
} }
View File
@ -68,7 +68,6 @@ import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle; import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;
import org.elasticsearch.indices.IndicesWarmer.WarmerContext;
import org.elasticsearch.indices.cache.request.IndicesRequestCache; import org.elasticsearch.indices.cache.request.IndicesRequestCache;
import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
@ -180,8 +179,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval); this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval);
this.indicesWarmer.addListener(new NormsWarmer()); this.indicesWarmer.addListener(new NormsWarmer(indicesWarmer));
this.indicesWarmer.addListener(new FieldDataWarmer()); this.indicesWarmer.addListener(new FieldDataWarmer(indicesWarmer));
this.indicesWarmer.addListener(new SearchWarmer()); this.indicesWarmer.addListener(new SearchWarmer());
defaultSearchTimeout = settings.getAsTime(DEFAULT_SEARCH_TIMEOUT, NO_TIMEOUT); defaultSearchTimeout = settings.getAsTime(DEFAULT_SEARCH_TIMEOUT, NO_TIMEOUT);
@ -949,11 +948,15 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
return this.activeContexts.size(); return this.activeContexts.size();
} }
static class NormsWarmer extends IndicesWarmer.Listener { static class NormsWarmer implements IndicesWarmer.Listener {
private final IndicesWarmer indicesWarmer;
public NormsWarmer(IndicesWarmer indicesWarmer) {
this.indicesWarmer = indicesWarmer;
}
@Override @Override
public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) { public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
final Loading defaultLoading = Loading.parse(indexMetaData.getSettings().get(NORMS_LOADING_KEY), Loading.LAZY); final Loading defaultLoading = Loading.parse(indexShard.getIndexSettings().getSettings().get(NORMS_LOADING_KEY), Loading.LAZY);
final MapperService mapperService = indexShard.mapperService(); final MapperService mapperService = indexShard.mapperService();
final ObjectSet<String> warmUp = new ObjectHashSet<>(); final ObjectSet<String> warmUp = new ObjectHashSet<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@ -971,14 +974,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
final CountDownLatch latch = new CountDownLatch(1);
// Norms loading may be I/O intensive but is not CPU intensive, so we execute it in a single task
-threadPool.executor(executor()).execute(new Runnable() {
+indicesWarmer.getExecutor().execute(new Runnable() {
@Override
public void run() {
try {
for (ObjectCursor<String> stringObjectCursor : warmUp) {
final String indexName = stringObjectCursor.value;
final long start = System.nanoTime();
-for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
+for (final LeafReaderContext ctx : searcher.reader().leaves()) {
final NumericDocValues values = ctx.reader().getNormValues(indexName);
if (values != null) {
values.get(0);
@ -1005,15 +1008,21 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
@Override
-public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
+public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) {
return TerminationHandle.NO_WAIT;
}
}
-static class FieldDataWarmer extends IndicesWarmer.Listener {
+static class FieldDataWarmer implements IndicesWarmer.Listener {
+    private final IndicesWarmer indicesWarmer;
+
+    public FieldDataWarmer(IndicesWarmer indicesWarmer) {
+        this.indicesWarmer = indicesWarmer;
+    }
@Override
-public TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
+public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@ -1048,9 +1057,9 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
-final Executor executor = threadPool.executor(executor());
+final Executor executor = indicesWarmer.getExecutor();
-final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
+final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
-for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
+for (final LeafReaderContext ctx : searcher.reader().leaves()) {
for (final MappedFieldType fieldType : warmUp.values()) {
executor.execute(new Runnable() {
@ -1081,7 +1090,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
@Override
-public TerminationHandle warmTopReader(final IndexShard indexShard, IndexMetaData indexMetaData, final WarmerContext context, ThreadPool threadPool) {
+public TerminationHandle warmTopReader(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
@ -1114,7 +1123,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
-final Executor executor = threadPool.executor(executor());
+final Executor executor = indicesWarmer.getExecutor();
final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
executor.execute(new Runnable() {
@ -1123,7 +1132,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
try {
final long start = System.nanoTime();
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
-ifd.loadGlobal(context.getDirectoryReader());
+ifd.loadGlobal(searcher.getDirectoryReader());
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start));
}
@ -1144,83 +1153,73 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
-class SearchWarmer extends IndicesWarmer.Listener {
+class SearchWarmer implements IndicesWarmer.Listener {
@Override
-public TerminationHandle warmNewReaders(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
+public TerminationHandle warmNewReaders(IndexShard indexShard, final Engine.Searcher searcher) {
-return internalWarm(indexShard, indexMetaData, context, threadPool, false);
+return internalWarm(indexShard, searcher, false);
}
@Override
-public TerminationHandle warmTopReader(IndexShard indexShard, IndexMetaData indexMetaData, WarmerContext context, ThreadPool threadPool) {
+public TerminationHandle warmTopReader(IndexShard indexShard, final Engine.Searcher searcher) {
-return internalWarm(indexShard, indexMetaData, context, threadPool, true);
+return internalWarm(indexShard, searcher, true);
}
-public TerminationHandle internalWarm(final IndexShard indexShard, final IndexMetaData indexMetaData, final IndicesWarmer.WarmerContext warmerContext, ThreadPool threadPool, final boolean top) {
+public TerminationHandle internalWarm(final IndexShard indexShard, final Engine.Searcher searcher, final boolean top) {
-IndexWarmersMetaData custom = indexMetaData.custom(IndexWarmersMetaData.TYPE);
+IndexWarmersMetaData custom = indexShard.getIndexSettings().getIndexMetaData().custom(IndexWarmersMetaData.TYPE);
if (custom == null) {
return TerminationHandle.NO_WAIT;
}
-final Executor executor = threadPool.executor(executor());
+final Executor executor = indicesWarmer.getExecutor();
final CountDownLatch latch = new CountDownLatch(custom.entries().size());
for (final IndexWarmersMetaData.Entry entry : custom.entries()) {
-executor.execute(new Runnable() {
-@Override
-public void run() {
+executor.execute(() -> {
SearchContext context = null;
try {
long now = System.nanoTime();
final IndexService indexService = indicesService.indexServiceSafe(indexShard.shardId().index().name());
QueryParseContext queryParseContext = new QueryParseContext(indexService.queryParserService().indicesQueriesRegistry());
queryParseContext.parseFieldMatcher(indexService.queryParserService().parseFieldMatcher());
-ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexMetaData.getNumberOfShards(),
+ShardSearchRequest request = new ShardSearchLocalRequest(indexShard.shardId(), indexShard.getIndexSettings().getNumberOfShards(),
SearchType.QUERY_THEN_FETCH, entry.source().build(queryParseContext), entry.types(), entry.requestCache());
-context = createContext(request, warmerContext.searcher());
+context = createContext(request, searcher);
// if we use sort, we need to do query to sort on
// it and load relevant field data
// if not, we might as well set size=0 (and cache
// if needed)
if (context.sort() == null) {
context.size(0);
}
boolean canCache = indicesQueryCache.canCache(request, context);
// early terminate when we can cache, since we
// can only do proper caching on top level searcher
// also, if we can't cache, and its top, we don't
// need to execute it, since we already did when its
// not top
if (canCache != top) {
return;
}
loadOrExecuteQueryPhase(request, context, queryPhase);
long took = System.nanoTime() - now;
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed [{}], took [{}]", entry.name(), TimeValue.timeValueNanos(took));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("warmer [{}] failed", t, entry.name());
} finally {
try {
if (context != null) {
freeContext(context.id());
cleanContext(context);
}
} finally {
latch.countDown();
}
}
-}
});
}
-return new TerminationHandle() {
-@Override
-public void awaitTermination() throws InterruptedException {
-latch.await();
-}
-};
+return () -> latch.await();
}
}
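The net effect of the hunks above is a slimmer warming contract: listeners receive the shard and the searcher directly, and fetch their executor from IndicesWarmer rather than taking a ThreadPool. A no-op listener under the new interface would look roughly like this (a sketch; the class name is hypothetical, the signatures are the ones shown in the diff):

------------------------------
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.IndicesWarmer.TerminationHandle;

class NoopWarmer implements IndicesWarmer.Listener {
    @Override
    public TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
        // index settings are now reached through the shard instead of a passed-in IndexMetaData
        return TerminationHandle.NO_WAIT;
    }

    @Override
    public TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
        return TerminationHandle.NO_WAIT;
    }
}
------------------------------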

View File

@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.LongArray;

View File

@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
@ -163,7 +163,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
resize(geoValues.count());
for (int i = 0; i < count(); ++i) {
GeoPoint target = geoValues.valueAt(i);
-values[i] = XGeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
+values[i] = GeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
}
sort();
}
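Based on the call sites above, usage of the renamed utility is unchanged apart from the class name. A small standalone sketch (the coordinates are arbitrary; note the lon, lat argument order used throughout the diff):

------------------------------
import org.apache.lucene.util.GeoHashUtils;

public class GeoHashDemo {
    public static void main(String[] args) {
        // pack a point into the long-encoded geohash form at precision 5 (lon, lat order)
        long cell = GeoHashUtils.longEncode(-0.1275, 51.5072, 5);
        // the same cell rendered as a base-32 geohash string
        String hash = GeoHashUtils.stringEncode(cell);
        System.out.println(hash);
    }
}
------------------------------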

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.util.PriorityQueue;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -101,7 +101,7 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation<Internal
@Override
public String getKeyAsString() {
-return XGeoHashUtils.stringEncode(geohashAsLong);
+return GeoHashUtils.stringEncode(geohashAsLong);
}
@Override

View File

@ -79,7 +79,7 @@ public class SignificanceHeuristicStreams {
* @param name The given name
* @return The associated stream
*/
-public static synchronized Stream stream(String name) {
+private static synchronized Stream stream(String name) {
return STREAMS.get(name);
}

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search.aggregations.metrics.geocentroid;
import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
@ -96,7 +96,7 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts;
pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts;
}
-centroids.set(bucket, XGeoUtils.mortonHash(pt[0], pt[1]));
+centroids.set(bucket, GeoUtils.mortonHash(pt[0], pt[1]));
}
}
};
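The update inside that loop is the standard incremental mean, mean_n = mean_{n-1} + (x_n - mean_{n-1}) / n, which lets the centroid fold in one point at a time without accumulating a raw sum. A standalone sketch with arbitrary sample values:

------------------------------
public class IncrementalMeanDemo {
    public static void main(String[] args) {
        double[] longitudes = { -0.12, -0.13, -0.11 }; // arbitrary sample values
        double mean = 0;
        long n = 0;
        for (double x : longitudes) {
            n++;
            mean += (x - mean) / n; // same update rule as the aggregator above
        }
        // equals (-0.12 + -0.13 + -0.11) / 3 up to floating-point rounding
        System.out.println(mean);
    }
}
------------------------------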

View File

@ -19,7 +19,7 @@
package org.elasticsearch.search.aggregations.metrics.geocentroid;
-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -140,7 +140,7 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
out.writeVLong(count);
if (centroid != null) {
out.writeBoolean(true);
-out.writeLong(XGeoUtils.mortonHash(centroid.lon(), centroid.lat()));
+out.writeLong(GeoUtils.mortonHash(centroid.lon(), centroid.lat()));
} else {
out.writeBoolean(false);
}

View File

@ -79,7 +79,7 @@ public class MovAvgModelStreams {
* @param name The given name
* @return The associated stream
*/
-public static synchronized Stream stream(String name) {
+private static synchronized Stream stream(String name) {
return STREAMS.get(name);
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.support.format;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -250,7 +250,7 @@ public interface ValueFormatter extends Streamable {
@Override
public String format(long value) {
-return XGeoHashUtils.stringEncode(value);
+return GeoHashUtils.stringEncode(value);
}
@Override

View File

@ -724,8 +724,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, TRACK_SCORES_FIELD)) {
builder.trackScores = parser.booleanValue();
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
-FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
-builder.fetchSourceContext = fetchSourceContext;
+builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
} else if (context.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>();
fieldNames.add(parser.text());
@ -742,8 +741,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.parseFieldMatcher().match(currentFieldName, POST_FILTER_FIELD)) {
builder.postQueryBuilder = context.parseInnerQueryBuilder();
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
-FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
-builder.fetchSourceContext = fetchSourceContext;
+builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
} else if (context.parseFieldMatcher().match(currentFieldName, SCRIPT_FIELDS_FIELD)) {
List<ScriptField> scriptFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -886,8 +884,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
builder.stats = stats;
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
-FetchSourceContext fetchSourceContext = FetchSourceContext.parse(parser, context);
-builder.fetchSourceContext = fetchSourceContext;
+builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
} else {
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
parser.getTokenLocation());

View File

@ -164,7 +164,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
-* Set this to true when using the highlighterType <tt>fast-vector-highlighter</tt>
+* Set this to true when using the highlighterType <tt>fvh</tt>
* and you want to provide highlighting on filter clauses in your
* query. Default is <tt>false</tt>.
*/
@ -237,7 +237,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
-* When using the highlighterType <tt>fast-vector-highlighter</tt> this setting
+* When using the highlighterType <tt>fvh</tt> this setting
* controls how far to look for boundary characters, and defaults to 20.
*/
public HighlightBuilder boundaryMaxScan(Integer boundaryMaxScan) {
@ -246,7 +246,7 @@ public class HighlightBuilder implements ToXContent {
}
/**
-* When using the highlighterType <tt>fast-vector-highlighter</tt> this setting
+* When using the highlighterType <tt>fvh</tt> this setting
* defines what constitutes a boundary for highlighting. Its a single string with
* each boundary character defined in it. It defaults to .,!? \t\n
*/
@ -256,8 +256,8 @@ public class HighlightBuilder implements ToXContent {
}
/**
-* Set type of highlighter to use. Supported types
+* Set type of highlighter to use. Out of the box supported types
-* are <tt>highlighter</tt>, <tt>fast-vector-highlighter</tt> and <tt>postings-highlighter</tt>.
+* are <tt>plain</tt>, <tt>fvh</tt> and <tt>postings</tt>.
* The default option selected is dependent on the mappings defined for your index.
* Details of the different highlighter types are covered in the reference guide.
*/
@ -568,8 +568,8 @@ public class HighlightBuilder implements ToXContent {
}
/**
-* Set type of highlighter to use. Supported types
+* Set type of highlighter to use. Out of the box supported types
-* are <tt>highlighter</tt>, <tt>fast-vector-highlighter</tt> nad <tt>postings-highlighter</tt>.
+* are <tt>plain</tt>, <tt>fvh</tt> and <tt>postings</tt>.
* This overrides global settings set by {@link HighlightBuilder#highlighterType(String)}.
*/
public Field highlighterType(String highlighterType) {
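With the renames above, only the short keys remain valid. A request built against this builder selects the type explicitly; a minimal sketch (the field name is arbitrary):

------------------------------
import org.elasticsearch.search.highlight.HighlightBuilder;

public class HighlightDemo {
    public static HighlightBuilder build() {
        // "fvh" replaces the removed "fast-vector-highlighter" key;
        // "plain" and "postings" replace "highlighter" and "postings-highlighter"
        return new HighlightBuilder()
                .field("body")
                .highlighterType("fvh")
                .boundaryMaxScan(20);
    }
}
------------------------------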

View File

@ -19,8 +19,6 @@
package org.elasticsearch.search.highlight;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.ExtensionPoint;
@ -31,26 +29,18 @@ import java.util.*;
*/
public class Highlighters extends ExtensionPoint.ClassMap<Highlighter> {
-@Deprecated // remove in 3.0
-private static final String FAST_VECTOR_HIGHLIGHTER = "fast-vector-highlighter";
private static final String FVH = "fvh";
-@Deprecated // remove in 3.0
-private static final String HIGHLIGHTER = "highlighter";
private static final String PLAIN = "plain";
-@Deprecated // remove in 3.0
-private static final String POSTINGS_HIGHLIGHTER = "postings-highlighter";
private static final String POSTINGS = "postings";
private final Map<String, Highlighter> parsers;
-private final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Highlighters.class.getName()));
public Highlighters(){
-this(Collections.EMPTY_MAP);
+this(Collections.emptyMap());
}
private Highlighters(Map<String, Highlighter> parsers) {
-super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, FAST_VECTOR_HIGHLIGHTER, PLAIN, HIGHLIGHTER, POSTINGS, POSTINGS_HIGHLIGHTER)),
+super("highlighter", Highlighter.class, new HashSet<>(Arrays.asList(FVH, PLAIN, POSTINGS)),
Highlighters.class);
this.parsers = Collections.unmodifiableMap(parsers);
}
@ -61,31 +51,15 @@ public class Highlighters extends ExtensionPoint.ClassMap<Highlighter> {
}
private static Map<String, Highlighter> addBuiltIns(Settings settings, Map<String, Highlighter> parsers) {
-// build in highlighers
Map<String, Highlighter> map = new HashMap<>();
map.put(FVH, new FastVectorHighlighter(settings));
-map.put(FAST_VECTOR_HIGHLIGHTER, map.get(FVH));
map.put(PLAIN, new PlainHighlighter());
-map.put(HIGHLIGHTER, map.get(PLAIN));
map.put(POSTINGS, new PostingsHighlighter());
-map.put(POSTINGS_HIGHLIGHTER, map.get(POSTINGS));
map.putAll(parsers);
return map;
}
public Highlighter get(String type) {
-switch (type) {
-case FAST_VECTOR_HIGHLIGHTER:
-deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", FAST_VECTOR_HIGHLIGHTER, FVH);
-break;
-case HIGHLIGHTER:
-deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", HIGHLIGHTER, PLAIN);
-break;
-case POSTINGS_HIGHLIGHTER:
-deprecationLogger.deprecated("highlighter key [{}] is deprecated and will be removed in 3.x use [{}] instead", POSTINGS_HIGHLIGHTER, POSTINGS);
-break;
-}
return parsers.get(type);
}
}

View File

@ -92,7 +92,6 @@ public class QueryPhase implements SearchPhase {
parseElements.put("query", new QueryParseElement()); parseElements.put("query", new QueryParseElement());
parseElements.put("queryBinary", new QueryBinaryParseElement()); parseElements.put("queryBinary", new QueryBinaryParseElement());
parseElements.put("query_binary", new QueryBinaryParseElement()); parseElements.put("query_binary", new QueryBinaryParseElement());
parseElements.put("filter", new PostFilterParseElement()); // For bw comp reason, should be removed in version 1.1
parseElements.put("post_filter", new PostFilterParseElement()); parseElements.put("post_filter", new PostFilterParseElement());
parseElements.put("postFilter", new PostFilterParseElement()); parseElements.put("postFilter", new PostFilterParseElement());
parseElements.put("filterBinary", new FilterBinaryParseElement()); parseElements.put("filterBinary", new FilterBinaryParseElement());

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.HasContextAndHeaders;
-import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -43,7 +42,6 @@ import static org.elasticsearch.search.suggest.SuggestUtils.parseSuggestContext;
public class CompletionSuggestParser implements SuggestContextParser {
private CompletionSuggester completionSuggester;
-private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("edit_distance");
public CompletionSuggestParser(CompletionSuggester completionSuggester) {
this.completionSuggester = completionSuggester;
@ -75,7 +73,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
if (token == XContentParser.Token.FIELD_NAME) {
fuzzyConfigName = parser.currentName();
} else if (token.isValue()) {
-if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, FUZZINESS)) {
+if (queryParserService.parseFieldMatcher().match(fuzzyConfigName, Fuzziness.FIELD)) {
suggestion.setFuzzyEditDistance(Fuzziness.parse(parser).asDistance());
} else if ("transpositions".equals(fuzzyConfigName)) {
suggestion.setFuzzyTranspositions(parser.booleanValue());

View File

@ -24,7 +24,7 @@ import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
@ -227,7 +227,7 @@ public class GeolocationContextMapping extends ContextMapping {
if(parser.nextToken() == Token.VALUE_NUMBER) {
double lat = parser.doubleValue();
if(parser.nextToken() == Token.END_ARRAY) {
-return Collections.singleton(XGeoHashUtils.stringEncode(lon, lat));
+return Collections.singleton(GeoHashUtils.stringEncode(lon, lat));
} else {
throw new ElasticsearchParseException("only two values expected");
}
@ -294,7 +294,7 @@ public class GeolocationContextMapping extends ContextMapping {
* @return new geolocation query
*/
public static GeoQuery query(String name, double lat, double lon, int ... precisions) {
-return query(name, XGeoHashUtils.stringEncode(lon, lat), precisions);
+return query(name, GeoHashUtils.stringEncode(lon, lat), precisions);
}
public static GeoQuery query(String name, double lat, double lon, String ... precisions) {
@ -302,7 +302,7 @@ public class GeolocationContextMapping extends ContextMapping {
for (int i = 0 ; i < precisions.length; i++) {
precisionInts[i] = GeoUtils.geoHashLevelsForPrecision(precisions[i]);
}
-return query(name, XGeoHashUtils.stringEncode(lon, lat), precisionInts);
+return query(name, GeoHashUtils.stringEncode(lon, lat), precisionInts);
}
/**
@ -574,7 +574,7 @@ public class GeolocationContextMapping extends ContextMapping {
* @return this
*/
public Builder addDefaultLocation(double lat, double lon) {
-this.defaultLocations.add(XGeoHashUtils.stringEncode(lon, lat));
+this.defaultLocations.add(GeoHashUtils.stringEncode(lon, lat));
return this;
}
@ -604,7 +604,7 @@ public class GeolocationContextMapping extends ContextMapping {
@Override
public GeolocationContextMapping build() {
if(precisions.isEmpty()) {
-precisions.add(XGeoHashUtils.PRECISION);
+precisions.add(GeoHashUtils.PRECISION);
}
int[] precisionArray = precisions.toArray();
Arrays.sort(precisionArray);
@ -670,7 +670,7 @@ public class GeolocationContextMapping extends ContextMapping {
int precision = Math.min(p, geohash.length());
String truncatedGeohash = geohash.substring(0, precision);
if(mapping.neighbors) {
-XGeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
+GeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
}
locations.add(truncatedGeohash);
}
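The truncate-and-expand logic above can be exercised directly against the renamed utility. A sketch with an arbitrary point and precision, using the same addNeighbors call shape as the hunk:

------------------------------
import org.apache.lucene.util.GeoHashUtils;

import java.util.HashSet;
import java.util.Set;

public class NeighborDemo {
    public static void main(String[] args) {
        String geohash = GeoHashUtils.stringEncode(-0.1275, 51.5072); // lon, lat
        int precision = Math.min(4, geohash.length());
        String truncated = geohash.substring(0, precision);
        Set<String> locations = new HashSet<>();
        // collect the surrounding cells at the truncated precision
        GeoHashUtils.addNeighbors(truncated, precision, locations);
        locations.add(truncated);
        System.out.println(locations);
    }
}
------------------------------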

View File

@ -31,46 +31,12 @@ grant codeBase "file:${{java.ext.dirs}}/*" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
-grant codeBase "${es.security.jar.lucene.core}" {
+grant codeBase "${codebase.lucene-core-5.4.0-snapshot-1710880.jar}" {
// needed to allow MMapDirectory's "unmap hack"
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
-//// test framework permissions.
-//// These are mock objects and test management that we allow test framework libs
-//// to provide on our behalf. But tests themselves cannot do this stuff!
-grant codeBase "${es.security.jar.elasticsearch.securemock}" {
-// needed to access ReflectionFactory (see below)
-permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
-// needed to support creation of mocks
-permission java.lang.RuntimePermission "reflectionFactoryAccess";
-// needed for spy interception, etc
-permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-};
-grant codeBase "${es.security.jar.lucene.testframework}" {
-// needed by RamUsageTester
-permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-};
-grant codeBase "${es.security.jar.randomizedtesting.runner}" {
-// optionally needed for access to private test methods (e.g. beforeClass)
-permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-// needed for top threads handling
-permission java.lang.RuntimePermission "modifyThreadGroup";
-};
-grant codeBase "${es.security.jar.randomizedtesting.junit4}" {
-// needed for gson serialization
-permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
-// needed for stream redirection
-permission java.lang.RuntimePermission "setIO";
-};
//// Everything else:
grant {
@ -107,10 +73,6 @@ grant {
// otherwise can be provided only to test libraries
permission java.lang.RuntimePermission "getStackTrace";
-// needed by ESTestCase for leniency of thread exceptions (?!)
-// otherwise can be provided only to test libraries
-permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
// needed by JMX instead of getFileSystemAttributes, seems like a bug...
permission java.lang.RuntimePermission "getFileStoreAttributes";
@ -126,10 +88,4 @@ grant {
// needed by JDKESLoggerTests
permission java.util.logging.LoggingPermission "control";
-// needed to install SSLFactories, advanced SSL configuration, etc.
-permission java.lang.RuntimePermission "setFactory";
-// needed to allow installation of bouncycastle crypto provider
-permission java.security.SecurityPermission "putProviderProperty.BC";
};

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//// additional test framework permissions.
//// These are mock objects and test management that we allow test framework libs
//// to provide on our behalf. But tests themselves cannot do this stuff!
grant codeBase "${codebase.securemock-1.1.jar}" {
// needed to access ReflectionFactory (see below)
permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect";
// needed to support creation of mocks
permission java.lang.RuntimePermission "reflectionFactoryAccess";
// needed for spy interception, etc
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.lucene-test-framework-5.4.0-snapshot-1710880.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.randomizedtesting-runner-2.2.0.jar}" {
// optionally needed for access to private test methods (e.g. beforeClass)
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
// needed to fail tests on uncaught exceptions from other threads
permission java.lang.RuntimePermission "setDefaultUncaughtExceptionHandler";
// needed for top threads handling
permission java.lang.RuntimePermission "modifyThreadGroup";
};
grant codeBase "${codebase.junit4-ant-2.2.0.jar}" {
// needed for stream redirection
permission java.lang.RuntimePermission "setIO";
};

View File

@ -367,7 +367,7 @@ public class TransportReplicationActionTests extends ESTestCase {
}
int pending = replicationPhase.pending();
int criticalFailures = 0; // failures that should fail the shard
-int successfull = 1;
+int successful = 1;
for (CapturingTransport.CapturedRequest capturedRequest : capturedRequests) {
if (randomBoolean()) {
Throwable t;
@ -380,19 +380,19 @@ public class TransportReplicationActionTests extends ESTestCase {
logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, t.getClass().getSimpleName()); logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, t.getClass().getSimpleName());
transport.handleResponse(capturedRequest.requestId, t); transport.handleResponse(capturedRequest.requestId, t);
} else { } else {
successfull++; successful++;
transport.handleResponse(capturedRequest.requestId, TransportResponse.Empty.INSTANCE); transport.handleResponse(capturedRequest.requestId, TransportResponse.Empty.INSTANCE);
} }
pending--; pending--;
assertThat(replicationPhase.pending(), equalTo(pending)); assertThat(replicationPhase.pending(), equalTo(pending));
assertThat(replicationPhase.successful(), equalTo(successfull)); assertThat(replicationPhase.successful(), equalTo(successful));
} }
assertThat(listener.isDone(), equalTo(true)); assertThat(listener.isDone(), equalTo(true));
Response response = listener.get(); Response response = listener.get();
final ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo(); final ActionWriteResponse.ShardInfo shardInfo = response.getShardInfo();
assertThat(shardInfo.getFailed(), equalTo(criticalFailures)); assertThat(shardInfo.getFailed(), equalTo(criticalFailures));
assertThat(shardInfo.getFailures(), arrayWithSize(criticalFailures)); assertThat(shardInfo.getFailures(), arrayWithSize(criticalFailures));
assertThat(shardInfo.getSuccessful(), equalTo(successfull)); assertThat(shardInfo.getSuccessful(), equalTo(successful));
assertThat(shardInfo.getTotal(), equalTo(totalShards)); assertThat(shardInfo.getTotal(), equalTo(totalShards));
assertThat("failed to see enough shard failures", transport.capturedRequests().length, equalTo(criticalFailures)); assertThat("failed to see enough shard failures", transport.capturedRequests().length, equalTo(criticalFailures));

View File

@ -19,28 +19,37 @@
package org.elasticsearch.bootstrap;
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestSecurityManager;
import org.elasticsearch.bootstrap.Bootstrap;
import org.elasticsearch.bootstrap.ESPolicy;
import org.elasticsearch.bootstrap.Security;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.plugins.PluginInfo;
+import org.junit.Assert;
import java.io.FilePermission;
import java.io.InputStream;
-import java.net.URI;
import java.net.URL;
import java.nio.file.Path;
import java.security.Permission;
-import java.security.PermissionCollection;
import java.security.Permissions;
import java.security.Policy;
-import java.security.URIParameter;
+import java.security.ProtectionDomain;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Properties;
+import java.util.Set;
import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
@ -83,7 +92,6 @@ public class BootstrapForTesting {
// install security manager if requested
if (systemPropertyAsBoolean("tests.security.manager", true)) {
try {
-Security.setCodebaseProperties();
// initialize paths the same exact way as bootstrap
Permissions perms = new Permissions();
// add permissions to everything in classpath
@ -121,30 +129,16 @@ public class BootstrapForTesting {
perms.add(new RuntimePermission("setIO")); perms.add(new RuntimePermission("setIO"));
} }
final Policy policy; // read test-framework permissions
// if its a plugin with special permissions, we use a wrapper policy impl to try final Policy testFramework = Security.readPolicy(Bootstrap.class.getResource("test-framework.policy"), JarHell.parseClassPath());
// to simulate what happens with a real distribution final Policy esPolicy = new ESPolicy(perms, getPluginPermissions());
List<URL> pluginPolicies = Collections.list(BootstrapForTesting.class.getClassLoader().getResources(PluginInfo.ES_PLUGIN_POLICY)); Policy.setPolicy(new Policy() {
if (!pluginPolicies.isEmpty()) { @Override
Permissions extra = new Permissions(); public boolean implies(ProtectionDomain domain, Permission permission) {
for (URL url : pluginPolicies) { // implements union
URI uri = url.toURI(); return esPolicy.implies(domain, permission) || testFramework.implies(domain, permission);
Policy pluginPolicy = Policy.getInstance("JavaPolicy", new URIParameter(uri));
PermissionCollection permissions = pluginPolicy.getPermissions(BootstrapForTesting.class.getProtectionDomain());
// this method is supported with the specific implementation we use, but just check for safety.
if (permissions == Policy.UNSUPPORTED_EMPTY_COLLECTION) {
throw new UnsupportedOperationException("JavaPolicy implementation does not support retrieving permissions");
}
for (Permission permission : Collections.list(permissions.elements())) {
extra.add(permission);
}
} }
// TODO: try to get rid of this class now that the world is simpler? });
policy = new MockPluginPolicy(perms, extra);
} else {
policy = new ESPolicy(perms, Collections.emptyMap());
}
Policy.setPolicy(policy);
System.setSecurityManager(new TestSecurityManager()); System.setSecurityManager(new TestSecurityManager());
Security.selfTest(); Security.selfTest();
@ -168,6 +162,71 @@ public class BootstrapForTesting {
}
}
/**
* we dont know which codesources belong to which plugin, so just remove the permission from key codebases
* like core, test-framework, etc. this way tests fail if accesscontroller blocks are missing.
*/
@SuppressForbidden(reason = "accesses fully qualified URLs to configure security")
static Map<String,Policy> getPluginPermissions() throws Exception {
List<URL> pluginPolicies = Collections.list(BootstrapForTesting.class.getClassLoader().getResources(PluginInfo.ES_PLUGIN_POLICY));
if (pluginPolicies.isEmpty()) {
return Collections.emptyMap();
}
// compute classpath minus obvious places, all other jars will get the permission.
Set<URL> codebases = new HashSet<>(Arrays.asList(parseClassPathWithSymlinks()));
Set<URL> excluded = new HashSet<>(Arrays.asList(
// es core
Bootstrap.class.getProtectionDomain().getCodeSource().getLocation(),
// es test framework
BootstrapForTesting.class.getProtectionDomain().getCodeSource().getLocation(),
// lucene test framework
LuceneTestCase.class.getProtectionDomain().getCodeSource().getLocation(),
// randomized runner
RandomizedRunner.class.getProtectionDomain().getCodeSource().getLocation(),
// junit library
Assert.class.getProtectionDomain().getCodeSource().getLocation()
));
codebases.removeAll(excluded);
// parse each policy file, with codebase substitution from the classpath
final List<Policy> policies = new ArrayList<>();
for (URL policyFile : pluginPolicies) {
policies.add(Security.readPolicy(policyFile, codebases.toArray(new URL[codebases.size()])));
}
// consult each policy file for those codebases
Map<String,Policy> map = new HashMap<>();
for (URL url : codebases) {
map.put(url.getFile(), new Policy() {
@Override
public boolean implies(ProtectionDomain domain, Permission permission) {
// implements union
for (Policy p : policies) {
if (p.implies(domain, permission)) {
return true;
}
}
return false;
}
});
}
return Collections.unmodifiableMap(map);
}
/**
* return parsed classpath, but with symlinks resolved to destination files for matching
* this is for matching the toRealPath() in the code where we have a proper plugin structure
*/
@SuppressForbidden(reason = "does evil stuff with paths and urls because devs and jenkins do evil stuff with paths and urls")
static URL[] parseClassPathWithSymlinks() throws Exception {
URL raw[] = JarHell.parseClassPath();
for (int i = 0; i < raw.length; i++) {
raw[i] = PathUtils.get(raw[i].toURI()).toRealPath().toUri().toURL();
}
return raw;
}
// does nothing, just easy way to make sure the class is loaded.
public static void ensureInitialized() {}
}
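Both the bootstrap block and getPluginPermissions above lean on the same composition trick: a Policy that unions its delegates, granting a permission if any one of them does. Factored out, the idiom looks like this (a sketch; the class name is hypothetical, the JDK types are standard):

------------------------------
import java.security.Permission;
import java.security.Policy;
import java.security.ProtectionDomain;

final class UnionPolicy extends Policy {
    private final Policy[] delegates;

    UnionPolicy(Policy... delegates) {
        this.delegates = delegates;
    }

    @Override
    public boolean implies(ProtectionDomain domain, Permission permission) {
        // grant if any delegate grants: the union of the delegate policies
        for (Policy delegate : delegates) {
            if (delegate.implies(domain, permission)) {
                return true;
            }
        }
        return false;
    }
}
------------------------------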

View File

@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.logging.Loggers;
import org.junit.Assert;
import java.net.URL;
import java.security.CodeSource;
import java.security.Permission;
import java.security.PermissionCollection;
import java.security.Policy;
import java.security.ProtectionDomain;
import java.security.cert.Certificate;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
* Simulates in unit tests per-plugin permissions.
* Unit tests for plugins do not have a proper plugin structure,
* so we don't know which codebases to apply the permission to.
* <p>
* As an approximation, we just exclude es/test/framework classes,
* because they will be present in stacks and fail tests for the
* simple case where an AccessController block is missing, because
* java security checks every codebase in the stacktrace, and we
* are sure to pollute it.
*/
final class MockPluginPolicy extends Policy {
final ESPolicy standardPolicy;
final PermissionCollection extraPermissions;
final Set<CodeSource> excludedSources;
/**
* Create a new MockPluginPolicy with dynamic {@code permissions} and
* adding the extra plugin permissions from {@code insecurePluginProp} to
* all code except test classes.
*/
MockPluginPolicy(PermissionCollection standard, PermissionCollection extra) throws Exception {
// the hack begins!
this.standardPolicy = new ESPolicy(standard, Collections.emptyMap());
this.extraPermissions = extra;
excludedSources = new HashSet<CodeSource>();
// exclude some obvious places
// es core
excludedSources.add(Bootstrap.class.getProtectionDomain().getCodeSource());
// es test framework
excludedSources.add(getClass().getProtectionDomain().getCodeSource());
// lucene test framework
excludedSources.add(LuceneTestCase.class.getProtectionDomain().getCodeSource());
// test runner
excludedSources.add(RandomizedRunner.class.getProtectionDomain().getCodeSource());
// junit library
excludedSources.add(Assert.class.getProtectionDomain().getCodeSource());
// scripts
excludedSources.add(new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[])null));
Loggers.getLogger(getClass()).debug("Apply extra permissions [{}] excluding codebases [{}]", extraPermissions, excludedSources);
}
@Override
public boolean implies(ProtectionDomain domain, Permission permission) {
CodeSource codeSource = domain.getCodeSource();
// codesource can be null when reducing privileges via doPrivileged()
if (codeSource == null) {
return false;
}
if (standardPolicy.implies(domain, permission)) {
return true;
} else if (excludedSources.contains(codeSource) == false &&
codeSource.toString().contains("test-classes") == false) {
return extraPermissions.implies(permission);
} else {
return false;
}
}
}

View File

@ -141,11 +141,11 @@ public class DiskUsageTests extends ESTestCase {
};
NodeStats[] nodeStats = new NodeStats[] {
new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
-null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null),
+null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null),
new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
-null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null),
+null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null),
new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0,
-null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null)
+null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null)
};
InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages);
DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1");

View File

@ -73,7 +73,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService {
null, null, null, null, null,
fsInfo,
null, null, null,
-null);
+null, null);
}
@Inject

View File

@ -0,0 +1,157 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.action.shard;

import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.junit.Assert.*;

public class ShardStateActionTests extends ESTestCase {
    private static ThreadPool THREAD_POOL;

    private ShardStateAction shardStateAction;
    private CapturingTransport transport;
    private TransportService transportService;
    private TestClusterService clusterService;

    @BeforeClass
    public static void startThreadPool() {
        THREAD_POOL = new ThreadPool("ShardStateActionTest");
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        this.transport = new CapturingTransport();
        clusterService = new TestClusterService(THREAD_POOL);
        transportService = new TransportService(transport, THREAD_POOL);
        transportService.start();
        shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null);
    }

    @Override
    @After
    public void tearDown() throws Exception {
        transportService.stop();
        super.tearDown();
    }

    @AfterClass
    public static void stopThreadPool() {
        ThreadPool.terminate(THREAD_POOL, 30, TimeUnit.SECONDS);
        THREAD_POOL = null;
    }

    public void testNoMaster() {
        final String index = "test";

        clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));

        // remove the elected master so the shard-failed request has nowhere to go
        DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterService.state().nodes());
        builder.masterNodeId(null);
        clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder));

        String indexUUID = clusterService.state().metaData().index(index).getIndexUUID();

        AtomicBoolean noMaster = new AtomicBoolean();
        assert !noMaster.get();

        shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() {
            @Override
            public void onShardFailedNoMaster() {
                noMaster.set(true);
            }

            @Override
            public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
            }
        });

        assertTrue(noMaster.get());
    }

    public void testFailure() {
        final String index = "test";

        clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5)));

        String indexUUID = clusterService.state().metaData().index(index).getIndexUUID();

        AtomicBoolean failure = new AtomicBoolean();
        assert !failure.get();

        shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() {
            @Override
            public void onShardFailedNoMaster() {
            }

            @Override
            public void onShardFailedFailure(DiscoveryNode master, TransportException e) {
                failure.set(true);
            }
        });

        // the request was captured rather than sent; answer it with a simulated transport failure
        final CapturingTransport.CapturedRequest[] capturedRequests = transport.capturedRequests();
        transport.clear();
        assertThat(capturedRequests.length, equalTo(1));
        assert !failure.get();
        transport.handleResponse(capturedRequests[0].requestId, new TransportException("simulated"));

        assertTrue(failure.get());
    }

    private ShardRouting getRandomShardRouting(String index) {
        IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index);
        ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt();
        ShardRouting shardRouting = shardsIterator.nextOrNull();
        assert shardRouting != null;
        return shardRouting;
    }

    private Throwable getSimulatedFailure() {
        return new CorruptIndexException("simulated", (String) null);
    }
}
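
The testFailure method above relies on a capture-then-reply idiom: the test transport records outgoing requests instead of sending them, the test inspects what was captured, and then hands back a response or exception by request id. A minimal hedged sketch of that idiom follows; every name here is hypothetical, and the real CapturingTransport has a much richer API.

------------------------------
import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of a capture-then-reply test transport.
// All names are invented for the example.
public class RecordingTransport {
    public static final class CapturedRequest {
        public final long requestId;
        public final String action;

        CapturedRequest(long requestId, String action) {
            this.requestId = requestId;
            this.action = action;
        }
    }

    private final List<CapturedRequest> captured = new ArrayList<>();
    private long nextRequestId = 0;

    // called by the code under test in place of a real network send
    public long sendRequest(String action) {
        long requestId = nextRequestId++;
        captured.add(new CapturedRequest(requestId, action));
        return requestId;
    }

    // the test inspects what was "sent" ...
    public CapturedRequest[] capturedRequests() {
        return captured.toArray(new CapturedRequest[0]);
    }

    // ... and resets the capture buffer between steps
    public void clear() {
        captured.clear();
    }
}
------------------------------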

View File

@@ -847,6 +847,19 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
        assertThat(results, arrayContainingInAnyOrder("foo1-closed", "foo2-closed", "foo3"));
    }

    public void testDedupConcreteIndices() {
        MetaData.Builder mdBuilder = MetaData.builder()
                .put(indexBuilder("index1").putAlias(AliasMetaData.builder("alias1")));
        ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build();
        IndicesOptions[] indicesOptions = new IndicesOptions[]{ IndicesOptions.strictExpandOpen(), IndicesOptions.strictExpand(),
                IndicesOptions.lenientExpandOpen(), IndicesOptions.strictExpandOpenAndForbidClosed()};
        for (IndicesOptions options : indicesOptions) {
            IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(state, options);
            // "index1" is addressed three times (twice by name, once via its alias) but must resolve to a single concrete index
            String[] results = indexNameExpressionResolver.concreteIndices(context, "index1", "index1", "alias1");
            assertThat(results, equalTo(new String[]{"index1"}));
        }
    }
    private MetaData metaDataBuilder(String... indices) {
        MetaData.Builder mdBuilder = MetaData.builder();
        for (String concreteIndex : indices) {

View File

@@ -22,11 +22,14 @@ package org.elasticsearch.common.cache;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;

+import java.lang.management.ManagementFactory;
+import java.lang.management.ThreadMXBean;
 import java.util.*;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReferenceArray;
+import java.util.stream.Collectors;

 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -460,6 +463,25 @@ public class CacheTests extends ESTestCase {
        assertEquals(replacements, notifications);
    }

    public void testComputeIfAbsentLoadsSuccessfully() {
        Map<Integer, Integer> map = new HashMap<>();
        Cache<Integer, Integer> cache = CacheBuilder.<Integer, Integer>builder().build();
        for (int i = 0; i < numberOfEntries; i++) {
            try {
                cache.computeIfAbsent(i, k -> {
                    int value = randomInt();
                    map.put(k, value);
                    return value;
                });
            } catch (ExecutionException e) {
                fail(e.getMessage());
            }
        }
        for (int i = 0; i < numberOfEntries; i++) {
            assertEquals(map.get(i), cache.get(i));
        }
    }
    public void testComputeIfAbsentCallsOnce() throws InterruptedException {
        int numberOfThreads = randomIntBetween(2, 200);
        final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build();

@@ -502,6 +524,146 @@
            }
        }

    public void testDependentKeyDeadlock() throws InterruptedException {
        class Key {
            private final int key;

            public Key(int key) {
                this.key = key;
            }

            @Override
            public boolean equals(Object o) {
                if (this == o) return true;
                if (o == null || getClass() != o.getClass()) return false;

                Key key1 = (Key) o;
                return key == key1.key;
            }

            @Override
            public int hashCode() {
                // deliberately collapse all keys into two hash buckets so that
                // different keys land on the same cache segment
                return key % 2;
            }
        }

        int numberOfThreads = randomIntBetween(2, 256);
        final Cache<Key, Integer> cache = CacheBuilder.<Key, Integer>builder().build();
        CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
        CountDownLatch deadlockLatch = new CountDownLatch(numberOfThreads);
        List<Thread> threads = new ArrayList<>();
        for (int i = 0; i < numberOfThreads; i++) {
            Thread thread = new Thread(() -> {
                Random random = new Random(random().nextLong());
                latch.countDown();
                for (int j = 0; j < numberOfEntries; j++) {
                    Key key = new Key(random.nextInt(numberOfEntries));
                    try {
                        cache.computeIfAbsent(key, k -> {
                            if (k.key == 0) {
                                return 0;
                            } else {
                                // the loader for key k reads key k / 2; combined with the
                                // colliding hashCode above, this is exactly the access
                                // pattern that could deadlock a segment-locked cache
                                Integer value = cache.get(new Key(k.key / 2));
                                return value != null ? value : 0;
                            }
                        });
                    } catch (ExecutionException e) {
                        fail(e.getMessage());
                    }
                }
                // successfully avoided deadlock, release the main thread
                deadlockLatch.countDown();
            });
            threads.add(thread);
            thread.start();
        }

        AtomicBoolean deadlock = new AtomicBoolean();
        assert !deadlock.get();

        // start a watchdog service
        ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
        scheduler.scheduleAtFixedRate(() -> {
            Set<Long> ids = threads.stream().map(t -> t.getId()).collect(Collectors.toSet());
            ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();
            long[] deadlockedThreads = mxBean.findDeadlockedThreads();
            if (!deadlock.get() && deadlockedThreads != null) {
                for (long deadlockedThread : deadlockedThreads) {
                    // ensure that we detected deadlock on our threads
                    if (ids.contains(deadlockedThread)) {
                        deadlock.set(true);
                        // release the main test thread to fail the test
                        for (int i = 0; i < numberOfThreads; i++) {
                            deadlockLatch.countDown();
                        }
                        break;
                    }
                }
            }
        }, 1, 1, TimeUnit.SECONDS);

        // everything is set up, release the hounds
        latch.countDown();

        // wait for either deadlock to be detected or the threads to terminate
        deadlockLatch.await();

        // shutdown the watchdog service
        scheduler.shutdown();

        assertFalse("deadlock", deadlock.get());
    }

    public void testCachePollution() throws InterruptedException {
        int numberOfThreads = randomIntBetween(2, 200);
        final Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build();
        CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
        List<Thread> threads = new ArrayList<>();
        for (int i = 0; i < numberOfThreads; i++) {
            Thread thread = new Thread(() -> {
                latch.countDown();
                Random random = new Random(random().nextLong());
                for (int j = 0; j < numberOfEntries; j++) {
                    Integer key = random.nextInt(numberOfEntries);
                    boolean first;
                    boolean second;
                    // rejection-sample a uniform choice among the three remaining
                    // (first, second) combinations: load, invalidate, or read
                    do {
                        first = random.nextBoolean();
                        second = random.nextBoolean();
                    } while (first && second);
                    if (first && !second) {
                        try {
                            cache.computeIfAbsent(key, k -> {
                                if (random.nextBoolean()) {
                                    return Integer.toString(k);
                                } else {
                                    // a failed load must not pollute the cache with a broken entry
                                    throw new Exception("testCachePollution");
                                }
                            });
                        } catch (ExecutionException e) {
                            assertNotNull(e.getCause());
                            assertThat(e.getCause(), instanceOf(Exception.class));
                            assertEquals(e.getCause().getMessage(), "testCachePollution");
                        }
                    } else if (!first && second) {
                        cache.invalidate(key);
                    } else if (!first && !second) {
                        cache.get(key);
                    }
                }
            });
            threads.add(thread);
            thread.start();
        }

        latch.countDown();

        for (Thread thread : threads) {
            thread.join();
        }
    }

    // test that the cache is not corrupted under lots of concurrent modifications, even hitting the same key
    // here be dragons: this test did catch one subtle bug during development; do not remove lightly
    public void testTorture() throws InterruptedException {
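
The watchdog in testDependentKeyDeadlock above is a reusable idiom: poll the JVM's ThreadMXBean on a schedule and react if any threads have deadlocked, so a deadlocked test fails instead of hanging the build. A minimal standalone sketch follows, with the class and method names invented for illustration; ManagementFactory.getThreadMXBean() and findDeadlockedThreads() are standard java.lang.management APIs.

------------------------------
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Hypothetical helper: runs onDeadlock if the JVM reports any deadlocked
// threads. The caller shuts the returned scheduler down when done.
public final class DeadlockWatchdog {
    public static ScheduledExecutorService watch(Runnable onDeadlock) {
        ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();
        ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
        scheduler.scheduleAtFixedRate(() -> {
            // findDeadlockedThreads() returns null when no threads are deadlocked
            long[] deadlocked = mxBean.findDeadlockedThreads();
            if (deadlocked != null) {
                onDeadlock.run();
            }
        }, 1, 1, TimeUnit.SECONDS);
        return scheduler;
    }
}
------------------------------

In the test above, the equivalent callback trips an AtomicBoolean and counts down the latch the main thread is waiting on, which converts a detected deadlock into an ordinary assertion failure.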

Some files were not shown because too many files have changed in this diff.