Upgrade to lucene-6.1.0-snapshot-3a57bea.
parent a25b8ee1bf
commit 44c653f5a8

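Most of the hunks below migrate Elasticsearch's custom Query subclasses to the equals/hashCode helpers the upgraded Lucene exposes on Query: super.equals(o) becomes sameClassAs(o) and super.hashCode() becomes classHash(). A minimal sketch of the resulting pattern, assuming a hypothetical MyQuery class that is not part of this commit:

    import java.util.Objects;
    import org.apache.lucene.search.Query;

    public final class MyQuery extends Query {
        private final String field;

        public MyQuery(String field) {
            this.field = field;
        }

        @Override
        public boolean equals(Object obj) {
            // sameClassAs(obj) replaces the old super.equals(obj)/getClass() check
            return sameClassAs(obj) && Objects.equals(field, ((MyQuery) obj).field);
        }

        @Override
        public int hashCode() {
            // classHash() replaces super.hashCode() as the per-class seed
            return 31 * classHash() + field.hashCode();
        }

        @Override
        public String toString(String field) {
            return "MyQuery[" + this.field + "]";
        }
    }
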
@@ -31,5 +31,3 @@ org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()

@defaultMessage Soon to be removed
org.apache.lucene.document.FieldType#numericType()
-
-org.apache.lucene.document.InetAddressPoint#newPrefixQuery(java.lang.String, java.net.InetAddress, int) @LUCENE-7232

@@ -1,5 +1,5 @@
elasticsearch = 5.0.0
-lucene = 6.0.1
+lucene = 6.1.0-snapshot-3a57bea

# optional dependencies
spatial4j = 0.6

@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.document;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Arrays;
-
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.NumericUtils;
-import org.elasticsearch.common.SuppressForbidden;
-
-/**
- * Forked utility methods from Lucene's InetAddressPoint until LUCENE-7232 and
- * LUCENE-7234 are released.
- */
-// TODO: remove me when we upgrade to Lucene 6.1
-@SuppressForbidden(reason="uses InetAddress.getHostAddress")
-public final class XInetAddressPoint {
-
-    private XInetAddressPoint() {}
-
-    /** The minimum value that an ip address can hold. */
-    public static final InetAddress MIN_VALUE;
-    /** The maximum value that an ip address can hold. */
-    public static final InetAddress MAX_VALUE;
-    static {
-        MIN_VALUE = InetAddressPoint.decode(new byte[InetAddressPoint.BYTES]);
-        byte[] maxValueBytes = new byte[InetAddressPoint.BYTES];
-        Arrays.fill(maxValueBytes, (byte) 0xFF);
-        MAX_VALUE = InetAddressPoint.decode(maxValueBytes);
-    }
-
-    /**
-     * Return the {@link InetAddress} that compares immediately greater than
-     * {@code address}.
-     * @throws ArithmeticException if the provided address is the
-     *         {@link #MAX_VALUE maximum ip address}
-     */
-    public static InetAddress nextUp(InetAddress address) {
-        if (address.equals(MAX_VALUE)) {
-            throw new ArithmeticException("Overflow: there is no greater InetAddress than "
-                + address.getHostAddress());
-        }
-        byte[] delta = new byte[InetAddressPoint.BYTES];
-        delta[InetAddressPoint.BYTES-1] = 1;
-        byte[] nextUpBytes = new byte[InetAddressPoint.BYTES];
-        NumericUtils.add(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextUpBytes);
-        return InetAddressPoint.decode(nextUpBytes);
-    }
-
-    /**
-     * Return the {@link InetAddress} that compares immediately less than
-     * {@code address}.
-     * @throws ArithmeticException if the provided address is the
-     *         {@link #MIN_VALUE minimum ip address}
-     */
-    public static InetAddress nextDown(InetAddress address) {
-        if (address.equals(MIN_VALUE)) {
-            throw new ArithmeticException("Underflow: there is no smaller InetAddress than "
-                + address.getHostAddress());
-        }
-        byte[] delta = new byte[InetAddressPoint.BYTES];
-        delta[InetAddressPoint.BYTES-1] = 1;
-        byte[] nextDownBytes = new byte[InetAddressPoint.BYTES];
-        NumericUtils.subtract(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextDownBytes);
-        return InetAddressPoint.decode(nextDownBytes);
-    }
-
-    /**
-     * Create a prefix query for matching a CIDR network range.
-     *
-     * @param field field name. must not be {@code null}.
-     * @param value any host address
-     * @param prefixLength the network prefix length for this address. This is also known as the subnet mask in the context of IPv4
-     *        addresses.
-     * @throws IllegalArgumentException if {@code field} is null, or prefixLength is invalid.
-     * @return a query matching documents with addresses contained within this network
-     */
-    // TODO: remove me when we upgrade to Lucene 6.0.1
-    public static Query newPrefixQuery(String field, InetAddress value, int prefixLength) {
-        if (value == null) {
-            throw new IllegalArgumentException("InetAddress must not be null");
-        }
-        if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) {
-            throw new IllegalArgumentException("illegal prefixLength '" + prefixLength
-                + "'. Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges");
-        }
-        // create the lower value by zeroing out the host portion, upper value by filling it with all ones.
-        byte lower[] = value.getAddress();
-        byte upper[] = value.getAddress();
-        for (int i = prefixLength; i < 8 * lower.length; i++) {
-            int m = 1 << (7 - (i & 7));
-            lower[i >> 3] &= ~m;
-            upper[i >> 3] |= m;
-        }
-        try {
-            return InetAddressPoint.newRangeQuery(field, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper));
-        } catch (UnknownHostException e) {
-            throw new AssertionError(e); // values are coming from InetAddress
-        }
-    }
-}

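The deleted XInetAddressPoint above was a stop-gap fork; with this upgrade the same members exist on Lucene's own InetAddressPoint, and the IpFieldMapper hunks later in this commit switch to them directly. A short usage sketch under that assumption (the field name and addresses are examples only):

    import java.net.InetAddress;
    import org.apache.lucene.document.InetAddressPoint;
    import org.apache.lucene.search.Query;

    public class InetAddressPointSketch {
        public static void main(String[] args) throws Exception {
            // open-ended bounds built from the constants the mapper now uses
            InetAddress lower = InetAddressPoint.nextUp(InetAddressPoint.MIN_VALUE);
            InetAddress upper = InetAddressPoint.nextDown(InetAddressPoint.MAX_VALUE);
            Query range = InetAddressPoint.newRangeQuery("ip", lower, upper);
            // CIDR-style prefix query, formerly XInetAddressPoint.newPrefixQuery
            Query cidr = InetAddressPoint.newPrefixQuery("ip", InetAddress.getByName("192.168.0.0"), 16);
            System.out.println(range + " " + cidr);
        }
    }
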
@@ -283,7 +283,7 @@ public abstract class BlendedTermQuery extends Query {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-        if (!super.equals(o)) return false;
+        if (sameClassAs(o) == false) return false;

        BlendedTermQuery that = (BlendedTermQuery) o;
        return Arrays.equals(equalsTerms(), that.equalsTerms());
@@ -291,7 +291,7 @@ public abstract class BlendedTermQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), Arrays.hashCode(equalsTerms()));
+        return Objects.hash(classHash(), Arrays.hashCode(equalsTerms()));
    }

    public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) {

@@ -44,12 +44,12 @@ public final class MinDocQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), minDoc);
+        return Objects.hash(classHash(), minDoc);
    }

    @Override
    public boolean equals(Object obj) {
-        if (super.equals(obj) == false) {
+        if (sameClassAs(obj) == false) {
            return false;
        }
        MinDocQuery that = (MinDocQuery) obj;

@@ -63,9 +63,6 @@ import org.elasticsearch.common.io.PathUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -622,8 +619,12 @@ public long ramBytesUsed() {
        Set<BytesRef> seenSurfaceForms = new HashSet<>();

        int dedup = 0;
-        while (reader.read(scratch)) {
-            input.reset(scratch.bytes(), 0, scratch.length());
+        while (true) {
+            BytesRef bytes = reader.next();
+            if (bytes == null) {
+                break;
+            }
+            input.reset(bytes.bytes, bytes.offset, bytes.length);
            short analyzedLength = input.readShort();
            analyzed.grow(analyzedLength+2);
            input.readBytes(analyzed.bytes(), 0, analyzedLength);
@@ -631,13 +632,13 @@ public long ramBytesUsed() {

            long cost = input.readInt();

-            surface.bytes = scratch.bytes();
+            surface.bytes = bytes.bytes;
            if (hasPayloads) {
                surface.length = input.readShort();
                surface.offset = input.getPosition();
            } else {
                surface.offset = input.getPosition();
-                surface.length = scratch.length() - surface.offset;
+                surface.length = bytes.length - surface.offset;
            }

            if (previousAnalyzed == null) {
@@ -679,11 +680,11 @@ public long ramBytesUsed() {
                builder.add(scratchInts.get(), outputs.newPair(cost, BytesRef.deepCopyOf(surface)));
            } else {
                int payloadOffset = input.getPosition() + surface.length;
-                int payloadLength = scratch.length() - payloadOffset;
+                int payloadLength = bytes.length - payloadOffset;
                BytesRef br = new BytesRef(surface.length + 1 + payloadLength);
                System.arraycopy(surface.bytes, surface.offset, br.bytes, 0, surface.length);
                br.bytes[surface.length] = (byte) payloadSep;
-                System.arraycopy(scratch.bytes(), payloadOffset, br.bytes, surface.length+1, payloadLength);
+                System.arraycopy(bytes.bytes, payloadOffset, br.bytes, surface.length+1, payloadLength);
                br.length = br.bytes.length;
                builder.add(scratchInts.get(), outputs.newPair(cost, br));
            }

@@ -77,7 +77,7 @@ public class Version {
    public static final int V_5_0_0_alpha3_ID = 5000003;
    public static final Version V_5_0_0_alpha3 = new Version(V_5_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
    public static final int V_5_0_0_ID = 5000099;
-    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_1);
+    public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
    public static final Version CURRENT = V_5_0_0;

    static {

@@ -33,6 +33,13 @@ public class JavaVersion implements Comparable<JavaVersion> {
    }

    private JavaVersion(List<Integer> version) {
+        if (version.size() >= 2
+                && version.get(0).intValue() == 1
+                && version.get(1).intValue() == 8) {
+            // for Java 8 there is ambiguity since both 1.8 and 8 are supported,
+            // so we rewrite the former to the latter
+            version = new ArrayList<>(version.subList(1, version.size()));
+        }
        this.version = Collections.unmodifiableList(version);
    }

@@ -75,6 +82,19 @@ public class JavaVersion implements Comparable<JavaVersion> {
        return 0;
    }

+    @Override
+    public boolean equals(Object o) {
+        if (o == null || o.getClass() != getClass()) {
+            return false;
+        }
+        return compareTo((JavaVersion) o) == 0;
+    }
+
+    @Override
+    public int hashCode() {
+        return version.hashCode();
+    }
+
    @Override
    public String toString() {
        return version.stream().map(v -> Integer.toString(v)).collect(Collectors.joining("."));

@@ -19,7 +19,7 @@ package org.elasticsearch.common.geo;
import java.util.ArrayList;
import java.util.Collection;

-import org.apache.lucene.spatial.util.GeoEncodingUtils;
+import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BitUtil;

/**
@@ -39,7 +39,7 @@ public class GeoHashUtils {

    /** maximum precision for geohash strings */
    public static final int PRECISION = 12;
-    private static final short MORTON_OFFSET = (GeoEncodingUtils.BITS<<1) - (PRECISION*5);
+    private static final short MORTON_OFFSET = (GeoPointField.BITS<<1) - (PRECISION*5);

    // No instance:
    private GeoHashUtils() {
@@ -51,7 +51,7 @@ public class GeoHashUtils {
    public static final long longEncode(final double lon, final double lat, final int level) {
        // shift to appropriate level
        final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
-        return ((BitUtil.flipFlop(GeoEncodingUtils.mortonHash(lat, lon)) >>> msf) << 4) | level;
+        return ((BitUtil.flipFlop(GeoPointField.encodeLatLon(lat, lon)) >>> msf) << 4) | level;
    }

    /**
@@ -117,7 +117,7 @@ public class GeoHashUtils {
     */
    public static final String stringEncode(final double lon, final double lat, final int level) {
        // convert to geohashlong
-        final long ghLong = fromMorton(GeoEncodingUtils.mortonHash(lat, lon), level);
+        final long ghLong = fromMorton(GeoPointField.encodeLatLon(lat, lon), level);
        return stringEncode(ghLong);

    }
@@ -138,7 +138,7 @@ public class GeoHashUtils {

        StringBuilder geoHash = new StringBuilder();
        short precision = 0;
-        final short msf = (GeoEncodingUtils.BITS<<1)-5;
+        final short msf = (GeoPointField.BITS<<1)-5;
        long mask = 31L<<msf;
        do {
            geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);

@@ -19,12 +19,11 @@

package org.elasticsearch.common.geo;

+import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BitUtil;

import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
-import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
-import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;

/**
 *
@@ -84,14 +83,14 @@ public final class GeoPoint {
    }

    public GeoPoint resetFromIndexHash(long hash) {
-        lon = mortonUnhashLon(hash);
-        lat = mortonUnhashLat(hash);
+        lon = GeoPointField.decodeLongitude(hash);
+        lat = GeoPointField.decodeLatitude(hash);
        return this;
    }

    public GeoPoint resetFromGeoHash(String geohash) {
        final long hash = mortonEncode(geohash);
-        return this.reset(mortonUnhashLat(hash), mortonUnhashLon(hash));
+        return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash));
    }

    public GeoPoint resetFromGeoHash(long geohashLong) {
@@ -164,8 +163,4 @@ public final class GeoPoint {
    public static GeoPoint fromGeohash(long geohashLong) {
        return new GeoPoint().resetFromGeoHash(geohashLong);
    }
-
-    public static GeoPoint fromIndexLong(long indexLong) {
-        return new GeoPoint().resetFromIndexHash(indexLong);
-    }
}

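The geo changes in this commit consistently swap the removed GeoEncodingUtils morton helpers for their GeoPointField counterparts, as in the GeoHashUtils and GeoPoint hunks above. A minimal sketch of the mapping, with example coordinates:

    import org.apache.lucene.spatial.geopoint.document.GeoPointField;

    public class GeoEncodingSketch {
        public static void main(String[] args) {
            // was GeoEncodingUtils.mortonHash(lat, lon)
            long hash = GeoPointField.encodeLatLon(42.0, 51.0);
            // was GeoEncodingUtils.mortonUnhashLat(hash) / mortonUnhashLon(hash)
            double lat = GeoPointField.decodeLatitude(hash);
            double lon = GeoPointField.decodeLongitude(hash);
            System.out.println(lat + "," + lon);
        }
    }
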
@@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

-import static org.apache.lucene.spatial.util.GeoDistanceUtils.maxRadialDistanceMeters;

import java.io.IOException;

@@ -67,6 +66,9 @@ public class GeoUtils {
    /** Earth ellipsoid polar distance in meters */
    public static final double EARTH_POLAR_DISTANCE = Math.PI * EARTH_SEMI_MINOR_AXIS;

+    /** rounding error for quantized latitude and longitude values */
+    public static final double TOLERANCE = 1E-6;
+
    /** Returns the minimum between the provided distance 'initialRadius' and the
     * maximum distance/radius from the point 'center' before overlapping
     **/
@@ -468,6 +470,14 @@ public class GeoUtils {
        }
    }

+    /** Returns the maximum distance/radius (in meters) from the point 'center' before overlapping */
+    public static double maxRadialDistanceMeters(final double centerLat, final double centerLon) {
+        if (Math.abs(centerLat) == MAX_LAT) {
+            return SloppyMath.haversinMeters(centerLat, centerLon, 0, centerLon);
+        }
+        return SloppyMath.haversinMeters(centerLat, centerLon, centerLat, (MAX_LON + centerLon) % 360);
+    }
+
    private GeoUtils() {
    }
}

@@ -45,6 +45,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SmallFloat;

import java.io.IOException;
+import java.util.Objects;
import java.util.Set;

/**
@@ -63,6 +64,19 @@ public final class AllTermQuery extends Query {
        this.term = term;
    }

+    @Override
+    public boolean equals(Object obj) {
+        if (sameClassAs(obj) == false) {
+            return false;
+        }
+        return Objects.equals(term, ((AllTermQuery) obj).term);
+    }
+
+    @Override
+    public int hashCode() {
+        return 31 * classHash() + term.hashCode();
+    }
+
    @Override
    public Query rewrite(IndexReader reader) throws IOException {
        Query rewritten = super.rewrite(reader);

@@ -66,4 +66,14 @@ public class MatchNoDocsQuery extends Query {
    public String toString(String field) {
        return "MatchNoDocsQuery[\"" + reason + "\"]";
    }
+
+    @Override
+    public boolean equals(Object obj) {
+        return sameClassAs(obj);
+    }
+
+    @Override
+    public int hashCode() {
+        return classHash();
+    }
}

@@ -84,14 +84,14 @@ public class MoreLikeThisQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), boostTerms, boostTermsFactor, Arrays.hashCode(likeText),
+        return Objects.hash(classHash(), boostTerms, boostTermsFactor, Arrays.hashCode(likeText),
                maxDocFreq, maxQueryTerms, maxWordLen, minDocFreq, minTermFrequency, minWordLen,
                Arrays.hashCode(moreLikeFields), minimumShouldMatch, stopWords);
    }

    @Override
    public boolean equals(Object obj) {
-        if (super.equals(obj) == false) {
+        if (sameClassAs(obj) == false) {
            return false;
        }
        MoreLikeThisQuery other = (MoreLikeThisQuery) obj;

@@ -238,7 +238,7 @@ public class MultiPhrasePrefixQuery extends Query {
     */
    @Override
    public boolean equals(Object o) {
-        if (super.equals(o) == false) {
+        if (sameClassAs(o) == false) {
            return false;
        }
        MultiPhrasePrefixQuery other = (MultiPhrasePrefixQuery) o;
@@ -252,7 +252,7 @@ public class MultiPhrasePrefixQuery extends Query {
     */
    @Override
    public int hashCode() {
-        return super.hashCode()
+        return classHash()
            ^ slop
            ^ termArraysHashCode()
            ^ positions.hashCode();

@@ -355,7 +355,7 @@ public class FiltersFunctionScoreQuery extends Query {
        if (this == o) {
            return true;
        }
-        if (super.equals(o) == false) {
+        if (sameClassAs(o) == false) {
            return false;
        }
        FiltersFunctionScoreQuery other = (FiltersFunctionScoreQuery) o;
@@ -367,6 +367,6 @@ public class FiltersFunctionScoreQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions));
+        return Objects.hash(classHash(), subQuery, maxBoost, combineFunction, minScore, scoreMode, Arrays.hashCode(filterFunctions));
    }
}

@@ -210,7 +210,7 @@ public class FunctionScoreQuery extends Query {
        if (this == o) {
            return true;
        }
-        if (super.equals(o) == false) {
+        if (sameClassAs(o) == false) {
            return false;
        }
        FunctionScoreQuery other = (FunctionScoreQuery) o;
@@ -221,6 +221,6 @@ public class FunctionScoreQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost);
+        return Objects.hash(classHash(), subQuery.hashCode(), function, combineFunction, minScore, maxBoost);
    }
}

@@ -181,6 +181,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
        IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
        IndicesQueryCache.INDICES_CACHE_QUERY_SIZE_SETTING,
        IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING,
+        IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING,
        IndicesTTLService.INDICES_TTL_INTERVAL_SETTING,
        MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING,
        MetaData.SETTING_READ_ONLY_SETTING,

@@ -24,7 +24,6 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
-import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
@@ -426,7 +425,7 @@ public final class OrdinalsBuilder implements Closeable {
            protected AcceptStatus accept(BytesRef term) throws IOException {
                // accept only the max resolution terms
                // todo is this necessary?
-                return GeoEncodingUtils.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
+                return GeoPointField.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
                    AcceptStatus.YES : AcceptStatus.END;
            }
        };

@@ -20,7 +20,6 @@

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.spatial.geopoint.document.GeoPointField;
-import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
@@ -58,7 +57,7 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
            return null;
        }
        if (termEncoding == GeoPointField.TermEncoding.PREFIX) {
-            return GeoEncodingUtils.prefixCodedToGeoCoded(term);
+            return GeoPointField.prefixCodedToGeoCoded(term);
        } else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) {
            return LegacyNumericUtils.prefixCodedToLong(term);
        }

@@ -204,7 +204,7 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
-            if (!super.equals(o)) return false;
+            if (sameClassAs(o) == false) return false;

            LateParsingQuery that = (LateParsingQuery) o;
            if (includeLower != that.includeLower) return false;
@@ -218,7 +218,7 @@ public class DateFieldMapper extends FieldMapper implements AllFieldMapper.Inclu

        @Override
        public int hashCode() {
-            return Objects.hash(super.hashCode(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone);
+            return Objects.hash(classHash(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone);
        }

        @Override

@@ -213,7 +213,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
-            if (!super.equals(o)) return false;
+            if (sameClassAs(o) == false) return false;

            LateParsingQuery that = (LateParsingQuery) o;
            if (includeLower != that.includeLower) return false;
@@ -227,7 +227,7 @@ public class LegacyDateFieldMapper extends LegacyNumberFieldMapper {

        @Override
        public int hashCode() {
-            return Objects.hash(super.hashCode(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone);
+            return Objects.hash(classHash(), lowerTerm, upperTerm, includeLower, includeUpper, timeZone);
        }

        @Override

@@ -195,7 +195,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {

        @Override
        public boolean equals(Object obj) {
-            if (super.equals(obj) == false) {
+            if (sameClassAs(obj) == false) {
                return false;
            }
            TypeQuery that = (TypeQuery) obj;
@@ -204,7 +204,7 @@ public class TypeFieldMapper extends MetadataFieldMapper {

        @Override
        public int hashCode() {
-            return 31 * super.hashCode() + type.hashCode();
+            return 31 * classHash() + type.hashCode();
        }

        @Override

@@ -23,7 +23,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField;
-import org.apache.lucene.document.XInetAddressPoint;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.XPointValues;
@@ -176,7 +175,7 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include
            if (fields.length == 2) {
                InetAddress address = InetAddresses.forString(fields[0]);
                int prefixLength = Integer.parseInt(fields[1]);
-                return XInetAddressPoint.newPrefixQuery(name(), address, prefixLength);
+                return InetAddressPoint.newPrefixQuery(name(), address, prefixLength);
            } else {
                throw new IllegalArgumentException("Expected [ip/prefix] but was [" + term + "]");
            }
@@ -191,27 +190,27 @@ public class IpFieldMapper extends FieldMapper implements AllFieldMapper.Include
            failIfNotIndexed();
            InetAddress lower;
            if (lowerTerm == null) {
-                lower = XInetAddressPoint.MIN_VALUE;
+                lower = InetAddressPoint.MIN_VALUE;
            } else {
                lower = parse(lowerTerm);
                if (includeLower == false) {
-                    if (lower.equals(XInetAddressPoint.MAX_VALUE)) {
+                    if (lower.equals(InetAddressPoint.MAX_VALUE)) {
                        return new MatchNoDocsQuery();
                    }
-                    lower = XInetAddressPoint.nextUp(lower);
+                    lower = InetAddressPoint.nextUp(lower);
                }
            }

            InetAddress upper;
            if (upperTerm == null) {
-                upper = XInetAddressPoint.MAX_VALUE;
+                upper = InetAddressPoint.MAX_VALUE;
            } else {
                upper = parse(upperTerm);
                if (includeUpper == false) {
-                    if (upper.equals(XInetAddressPoint.MIN_VALUE)) {
+                    if (upper.equals(InetAddressPoint.MIN_VALUE)) {
                        return new MatchNoDocsQuery();
                    }
-                    upper = XInetAddressPoint.nextDown(upper);
+                    upper = InetAddressPoint.nextDown(upper);
                }
            }

@@ -23,7 +23,6 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery;
-import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@@ -48,8 +47,6 @@ import java.util.Locale;
import java.util.Objects;
import java.util.Optional;

-import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE;

public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistanceRangeQueryBuilder> {

    public static final String NAME = "geo_distance_range";
@@ -354,7 +351,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
            toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT);
        }
    } else {
-        toValue = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon());
+        toValue = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon());
    }

    final Version indexVersionCreated = context.indexVersionCreated();
@@ -371,8 +368,8 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
            GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;

        return new XGeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lat(), point.lon(),
-            (includeLower) ? fromValue : fromValue + TOLERANCE,
-            (includeUpper) ? toValue : toValue - TOLERANCE);
+            (includeLower) ? fromValue : fromValue + GeoUtils.TOLERANCE,
+            (includeUpper) ? toValue : toValue - GeoUtils.TOLERANCE);
    }

    @Override

@@ -411,7 +411,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil

        @Override
        public boolean equals(Object o) {
-            if (!super.equals(o)) return false;
+            if (sameClassAs(o) == false) return false;

            LateParsingQuery that = (LateParsingQuery) o;

@@ -425,7 +425,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil

        @Override
        public int hashCode() {
-            return Objects.hash(super.hashCode(), toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode);
+            return Objects.hash(classHash(), toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode);
        }

        @Override

@@ -184,7 +184,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
-            if (!super.equals(obj))
+            if (sameClassAs(obj) == false)
                return false;
            ScriptQuery other = (ScriptQuery) obj;
            return Objects.equals(script, other.script);
@@ -192,7 +192,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>

        @Override
        public int hashCode() {
-            return Objects.hash(super.hashCode(), script);
+            return Objects.hash(classHash(), script);
        }

        @Override

@@ -190,7 +190,7 @@ public class GeoDistanceRangeQuery extends Query {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-        if (super.equals(o) == false) return false;
+        if (sameClassAs(o) == false) return false;

        GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) o;

@@ -212,7 +212,7 @@ public class GeoDistanceRangeQuery extends Query {

    @Override
    public int hashCode() {
-        int result = super.hashCode();
+        int result = classHash();
        long temp;
        temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L;
        result = 31 * result + Long.hashCode(temp);

@@ -111,7 +111,7 @@ public class GeoPolygonQuery extends Query {

    @Override
    public boolean equals(Object obj) {
-        if (super.equals(obj) == false) {
+        if (sameClassAs(obj) == false) {
            return false;
        }
        GeoPolygonQuery that = (GeoPolygonQuery) obj;
@@ -121,7 +121,7 @@ public class GeoPolygonQuery extends Query {

    @Override
    public int hashCode() {
-        int h = super.hashCode();
+        int h = classHash();
        h = 31 * h + indexFieldData.getFieldName().hashCode();
        h = 31 * h + Arrays.hashCode(points);
        return h;

@@ -84,7 +84,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query {

    @Override
    public boolean equals(Object obj) {
-        if (super.equals(obj) == false) {
+        if (sameClassAs(obj) == false) {
            return false;
        }
        InMemoryGeoBoundingBoxQuery other = (InMemoryGeoBoundingBoxQuery) obj;
@@ -95,7 +95,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), fieldName(), topLeft, bottomRight);
+        return Objects.hash(classHash(), fieldName(), topLeft, bottomRight);
    }

    private static class Meridian180GeoBoundingBoxBits implements Bits {

@@ -131,16 +131,7 @@ final class StoreRecovery {
    }

    final void addIndices(RecoveryState.Index indexRecoveryStats, Directory target, Directory... sources) throws IOException {
-        /*
-         * TODO: once we upgraded to Lucene 6.1 use HardlinkCopyDirectoryWrapper to enable hardlinks if possible and enable it
-         * in the security.policy:
-         *
-         * grant codeBase "${codebase.lucene-misc-6.1.0.jar}" {
-         *   // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper
-         *   permission java.nio.file.LinkPermission "hard";
-         * };
-         * target = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target);
-         */
+        target = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target);
        try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(target, indexRecoveryStats),
            new IndexWriterConfig(null)
                .setCommitOnClose(false)

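The StoreRecovery hunk above starts using Lucene's HardlinkCopyDirectoryWrapper directly, and the security.policy hunks further down grant the LinkPermission it needs. A sketch of the wrapper's use, assuming a placeholder target path:

    import java.nio.file.Paths;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.HardlinkCopyDirectoryWrapper;

    public class HardlinkCopySketch {
        public static void main(String[] args) throws Exception {
            // copyFrom() on the wrapper uses hard links when the filesystem allows it,
            // and falls back to a regular copy otherwise
            Directory target = new HardlinkCopyDirectoryWrapper(FSDirectory.open(Paths.get("/tmp/target")));
            System.out.println(target);
        }
    }
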
@@ -45,6 +45,7 @@ import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Predicate;

public class IndicesQueryCache extends AbstractComponent implements QueryCache, Closeable {

@@ -52,6 +53,9 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
        "indices.queries.cache.size", "10%", Property.NodeScope);
    public static final Setting<Integer> INDICES_CACHE_QUERY_COUNT_SETTING = Setting.intSetting(
        "indices.queries.cache.count", 10000, 1, Property.NodeScope);
+    // enables caching on all segments instead of only the larger ones, for testing only
+    public static final Setting<Boolean> INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING = Setting.boolSetting(
+        "indices.queries.cache.all_segments", false, Property.NodeScope);

    private final LRUQueryCache cache;
    private final ShardCoreKeyMap shardKeyMap = new ShardCoreKeyMap();
@@ -69,111 +73,11 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
        final int count = INDICES_CACHE_QUERY_COUNT_SETTING.get(settings);
        logger.debug("using [node] query cache with size [{}] max filter count [{}]",
            size, count);
-        cache = new LRUQueryCache(count, size.bytes()) {
-
-            private Stats getStats(Object coreKey) {
-                final ShardId shardId = shardKeyMap.getShardId(coreKey);
-                if (shardId == null) {
-                    return null;
-                }
-                return shardStats.get(shardId);
-            }
-
-            private Stats getOrCreateStats(Object coreKey) {
-                final ShardId shardId = shardKeyMap.getShardId(coreKey);
-                Stats stats = shardStats.get(shardId);
-                if (stats == null) {
-                    stats = new Stats();
-                    shardStats.put(shardId, stats);
-                }
-                return stats;
-            }
-
-            // It's ok to not protect these callbacks by a lock since it is
-            // done in LRUQueryCache
-            @Override
-            protected void onClear() {
-                assert Thread.holdsLock(this);
-                super.onClear();
-                for (Stats stats : shardStats.values()) {
-                    // don't throw away hit/miss
-                    stats.cacheSize = 0;
-                    stats.ramBytesUsed = 0;
-                }
-                sharedRamBytesUsed = 0;
-            }
-
-            @Override
-            protected void onQueryCache(Query filter, long ramBytesUsed) {
-                assert Thread.holdsLock(this);
-                super.onQueryCache(filter, ramBytesUsed);
-                sharedRamBytesUsed += ramBytesUsed;
-            }
-
-            @Override
-            protected void onQueryEviction(Query filter, long ramBytesUsed) {
-                assert Thread.holdsLock(this);
-                super.onQueryEviction(filter, ramBytesUsed);
-                sharedRamBytesUsed -= ramBytesUsed;
-            }
-
-            @Override
-            protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) {
-                assert Thread.holdsLock(this);
-                super.onDocIdSetCache(readerCoreKey, ramBytesUsed);
-                final Stats shardStats = getOrCreateStats(readerCoreKey);
-                shardStats.cacheSize += 1;
-                shardStats.cacheCount += 1;
-                shardStats.ramBytesUsed += ramBytesUsed;
-
-                StatsAndCount statsAndCount = stats2.get(readerCoreKey);
-                if (statsAndCount == null) {
-                    statsAndCount = new StatsAndCount(shardStats);
-                    stats2.put(readerCoreKey, statsAndCount);
-                }
-                statsAndCount.count += 1;
-            }
-
-            @Override
-            protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
-                assert Thread.holdsLock(this);
-                super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
-                // onDocIdSetEviction might sometimes be called with a number
-                // of entries equal to zero if the cache for the given segment
-                // was already empty when the close listener was called
-                if (numEntries > 0) {
-                    // We can't use ShardCoreKeyMap here because its core closed
-                    // listener is called before the listener of the cache which
-                    // triggers this eviction. So instead we use use stats2 that
-                    // we only evict when nothing is cached anymore on the segment
-                    // instead of relying on close listeners
-                    final StatsAndCount statsAndCount = stats2.get(readerCoreKey);
-                    final Stats shardStats = statsAndCount.stats;
-                    shardStats.cacheSize -= numEntries;
-                    shardStats.ramBytesUsed -= sumRamBytesUsed;
-                    statsAndCount.count -= numEntries;
-                    if (statsAndCount.count == 0) {
-                        stats2.remove(readerCoreKey);
-                    }
-                }
-            }
-
-            @Override
-            protected void onHit(Object readerCoreKey, Query filter) {
-                assert Thread.holdsLock(this);
-                super.onHit(readerCoreKey, filter);
-                final Stats shardStats = getStats(readerCoreKey);
-                shardStats.hitCount += 1;
-            }
-
-            @Override
-            protected void onMiss(Object readerCoreKey, Query filter) {
-                assert Thread.holdsLock(this);
-                super.onMiss(readerCoreKey, filter);
-                final Stats shardStats = getOrCreateStats(readerCoreKey);
-                shardStats.missCount += 1;
-            }
-        };
+        if (INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.get(settings)) {
+            cache = new ElasticsearchLRUQueryCache(count, size.bytes(), context -> true);
+        } else {
+            cache = new ElasticsearchLRUQueryCache(count, size.bytes());
+        }
        sharedRamBytesUsed = 0;
    }

@@ -316,4 +220,111 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache,
        assert empty(shardStats.get(shardId));
        shardStats.remove(shardId);
    }
+
+    private class ElasticsearchLRUQueryCache extends LRUQueryCache {
+
+        ElasticsearchLRUQueryCache(int maxSize, long maxRamBytesUsed, Predicate<LeafReaderContext> leavesToCache) {
+            super(maxSize, maxRamBytesUsed, leavesToCache);
+        }
+
+        ElasticsearchLRUQueryCache(int maxSize, long maxRamBytesUsed) {
+            super(maxSize, maxRamBytesUsed);
+        }
+
+        private Stats getStats(Object coreKey) {
+            final ShardId shardId = shardKeyMap.getShardId(coreKey);
+            if (shardId == null) {
+                return null;
+            }
+            return shardStats.get(shardId);
+        }
+
+        private Stats getOrCreateStats(Object coreKey) {
+            final ShardId shardId = shardKeyMap.getShardId(coreKey);
+            Stats stats = shardStats.get(shardId);
+            if (stats == null) {
+                stats = new Stats();
+                shardStats.put(shardId, stats);
+            }
+            return stats;
+        }
+
+        // It's ok to not protect these callbacks by a lock since it is
+        // done in LRUQueryCache
+        @Override
+        protected void onClear() {
+            super.onClear();
+            for (Stats stats : shardStats.values()) {
+                // don't throw away hit/miss
+                stats.cacheSize = 0;
+                stats.ramBytesUsed = 0;
+            }
+            sharedRamBytesUsed = 0;
+        }
+
+        @Override
+        protected void onQueryCache(Query filter, long ramBytesUsed) {
+            super.onQueryCache(filter, ramBytesUsed);
+            sharedRamBytesUsed += ramBytesUsed;
+        }
+
+        @Override
+        protected void onQueryEviction(Query filter, long ramBytesUsed) {
+            super.onQueryEviction(filter, ramBytesUsed);
+            sharedRamBytesUsed -= ramBytesUsed;
+        }
+
+        @Override
+        protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) {
+            super.onDocIdSetCache(readerCoreKey, ramBytesUsed);
+            final Stats shardStats = getOrCreateStats(readerCoreKey);
+            shardStats.cacheSize += 1;
+            shardStats.cacheCount += 1;
+            shardStats.ramBytesUsed += ramBytesUsed;
+
+            StatsAndCount statsAndCount = stats2.get(readerCoreKey);
+            if (statsAndCount == null) {
+                statsAndCount = new StatsAndCount(shardStats);
+                stats2.put(readerCoreKey, statsAndCount);
+            }
+            statsAndCount.count += 1;
+        }
+
+        @Override
+        protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
+            super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
+            // onDocIdSetEviction might sometimes be called with a number
+            // of entries equal to zero if the cache for the given segment
+            // was already empty when the close listener was called
+            if (numEntries > 0) {
+                // We can't use ShardCoreKeyMap here because its core closed
+                // listener is called before the listener of the cache which
+                // triggers this eviction. So instead we use use stats2 that
+                // we only evict when nothing is cached anymore on the segment
+                // instead of relying on close listeners
+                final StatsAndCount statsAndCount = stats2.get(readerCoreKey);
+                final Stats shardStats = statsAndCount.stats;
+                shardStats.cacheSize -= numEntries;
+                shardStats.ramBytesUsed -= sumRamBytesUsed;
+                statsAndCount.count -= numEntries;
+                if (statsAndCount.count == 0) {
+                    stats2.remove(readerCoreKey);
+                }
+            }
+        }
+
+        @Override
+        protected void onHit(Object readerCoreKey, Query filter) {
+            super.onHit(readerCoreKey, filter);
+            final Stats shardStats = getStats(readerCoreKey);
+            shardStats.hitCount += 1;
+        }
+
+        @Override
+        protected void onMiss(Object readerCoreKey, Query filter) {
+            super.onMiss(readerCoreKey, filter);
+            final Stats shardStats = getOrCreateStats(readerCoreKey);
+            shardStats.missCount += 1;
+        }
+    }
}

@@ -20,7 +20,7 @@
package org.elasticsearch.search.aggregations.metrics.geocentroid;

import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.spatial.util.GeoEncodingUtils;
+import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
@@ -82,9 +82,9 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
                counts.increment(bucket, valueCount);
                // get the previous GeoPoint if a moving avg was computed
                if (prevCounts > 0) {
-                    final GeoPoint centroid = GeoPoint.fromIndexLong(centroids.get(bucket));
-                    pt[0] = centroid.lon();
-                    pt[1] = centroid.lat();
+                    final long mortonCode = centroids.get(bucket);
+                    pt[0] = GeoPointField.decodeLongitude(mortonCode);
+                    pt[1] = GeoPointField.decodeLatitude(mortonCode);
                }
                // update the moving average
                for (int i = 0; i < valueCount; ++i) {
@@ -92,7 +92,9 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
                    pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts;
                    pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts;
                }
-                centroids.set(bucket, GeoEncodingUtils.mortonHash(pt[1], pt[0]));
+                // TODO: we do not need to interleave the lat and lon bits here
+                // should we just store them contiguously?
+                centroids.set(bucket, GeoPointField.encodeLatLon(pt[1], pt[0]));
            }
        }
    };
@@ -104,8 +106,10 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
            return buildEmptyAggregation();
        }
        final long bucketCount = counts.get(bucket);
-        final GeoPoint bucketCentroid = (bucketCount > 0) ? GeoPoint.fromIndexLong(centroids.get(bucket)) :
-            new GeoPoint(Double.NaN, Double.NaN);
+        final long mortonCode = centroids.get(bucket);
+        final GeoPoint bucketCentroid = (bucketCount > 0)
+            ? new GeoPoint(GeoPointField.decodeLatitude(mortonCode), GeoPointField.decodeLongitude(mortonCode))
+            : null;
        return new InternalGeoCentroid(name, bucketCentroid , bucketCount, pipelineAggregators(), metaData());
    }

@@ -19,7 +19,7 @@

package org.elasticsearch.search.aggregations.metrics.geocentroid;

-import org.apache.lucene.spatial.util.GeoEncodingUtils;
+import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -61,6 +61,7 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
    public InternalGeoCentroid(String name, GeoPoint centroid, long count, List<PipelineAggregator>
            pipelineAggregators, Map<String, Object> metaData) {
        super(name, pipelineAggregators, metaData);
+        assert (centroid == null) == (count == 0);
        this.centroid = centroid;
        assert count >= 0;
        this.count = count;
@@ -68,7 +69,7 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G

    @Override
    public GeoPoint centroid() {
-        return (centroid == null || Double.isNaN(centroid.lon()) ? null : centroid);
+        return centroid;
    }

    @Override
@@ -128,7 +129,8 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
    protected void doReadFrom(StreamInput in) throws IOException {
        count = in.readVLong();
        if (in.readBoolean()) {
-            centroid = GeoPoint.fromIndexLong(in.readLong());
+            final long hash = in.readLong();
+            centroid = new GeoPoint(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash));
        } else {
            centroid = null;
        }
@@ -139,7 +141,8 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
        out.writeVLong(count);
        if (centroid != null) {
            out.writeBoolean(true);
-            out.writeLong(GeoEncodingUtils.mortonHash(centroid.lat(), centroid.lon()));
+            // should we just write lat and lon separately?
+            out.writeLong(GeoPointField.encodeLatLon(centroid.lat(), centroid.lon()));
        } else {
            out.writeBoolean(false);
        }

@@ -175,7 +175,7 @@ public final class InnerHitsContext {

        @Override
        public boolean equals(Object obj) {
-            if (super.equals(obj) == false) {
+            if (sameClassAs(obj) == false) {
                return false;
            }
            NestedChildrenQuery other = (NestedChildrenQuery) obj;
@@ -187,7 +187,7 @@ public final class InnerHitsContext {

        @Override
        public int hashCode() {
-            int hash = super.hashCode();
+            int hash = classHash();
            hash = 31 * hash + parentFilter.hashCode();
            hash = 31 * hash + childFilter.hashCode();
            hash = 31 * hash + docId;

@@ -61,7 +61,7 @@ public abstract class SliceQuery extends Query {

    @Override
    public boolean equals(Object o) {
-        if (super.equals(o) == false) {
+        if (sameClassAs(o) == false) {
            return false;
        }
        SliceQuery that = (SliceQuery) o;
@@ -70,7 +70,7 @@ public abstract class SliceQuery extends Query {

    @Override
    public int hashCode() {
-        return Objects.hash(super.hashCode(), field, id, max);
+        return Objects.hash(classHash(), field, id, max);
    }

    @Override

@@ -74,11 +74,7 @@ public final class TermsSliceQuery extends SliceQuery {
            int hashCode = term.hashCode();
            if (contains(hashCode)) {
                docsEnum = te.postings(docsEnum, PostingsEnum.NONE);
-                int docId = docsEnum.nextDoc();
-                while (docId != DocIdSetIterator.NO_MORE_DOCS) {
-                    builder.add(docId);
-                    docId = docsEnum.nextDoc();
-                }
+                builder.add(docsEnum);
            }
        }
        return builder.build();

@@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.

-grant codeBase "${codebase.lucene-core-6.0.1.jar}" {
+grant codeBase "${codebase.lucene-core-6.1.0-snapshot-3a57bea.jar}" {
  // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
  // java 8 package
  permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
@@ -42,6 +42,11 @@ grant codeBase "${codebase.lucene-core-6.0.1.jar}" {
  permission java.lang.RuntimePermission "accessDeclaredMembers";
};

+grant codeBase "${codebase.lucene-misc-6.1.0-snapshot-3a57bea.jar}" {
+  // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper
+  permission java.nio.file.LinkPermission "hard";
+};
+
//// Everything else:

grant {

@@ -31,9 +31,11 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
  permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

-grant codeBase "${codebase.lucene-test-framework-6.0.1.jar}" {
+grant codeBase "${codebase.lucene-test-framework-6.1.0-snapshot-3a57bea.jar}" {
  // needed by RamUsageTester
  permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
+  // needed for testing hardlinks in StoreRecoveryTests since we install MockFS
+  permission java.nio.file.LinkPermission "hard";
};

grant codeBase "${codebase.randomizedtesting-runner-2.3.2.jar}" {

@@ -270,7 +270,8 @@ public class VersionTests extends ESTestCase {
            assertTrue("lucene versions must be " + other + " >= " + version,
                other.luceneVersion.onOrAfter(version.luceneVersion));
        }
-        if (other.major == version.major && other.minor == version.minor) {
+        if (other.isAlpha() == false && version.isAlpha() == false
+                && other.major == version.major && other.minor == version.minor) {
            assertEquals(other.luceneVersion.major, version.luceneVersion.major);
            assertEquals(other.luceneVersion.minor, version.luceneVersion.minor);
            // should we also assert the lucene bugfix version?

@@ -72,4 +72,8 @@ public class JavaVersionTests extends ESTestCase {
            assertFalse(JavaVersion.isValid(version));
        }
    }
+
+    public void testJava8Compat() {
+        assertEquals(JavaVersion.parse("1.8"), JavaVersion.parse("8"));
+    }
}

@@ -22,9 +22,9 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
-import org.apache.lucene.spatial.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.geo.GeoUtils;

import static org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint;
import static org.hamcrest.Matchers.allOf;
@@ -105,8 +105,8 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl
            assertThat(docCount, greaterThan(0));
            for (int i = 0; i < docCount; ++i) {
                final GeoPoint point = values.valueAt(i);
-                assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT_INCL), lessThanOrEqualTo(GeoUtils.MAX_LAT_INCL)));
-                assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON_INCL), lessThanOrEqualTo(GeoUtils.MAX_LON_INCL)));
+                assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT), lessThanOrEqualTo(GeoUtils.MAX_LAT)));
+                assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON), lessThanOrEqualTo(GeoUtils.MAX_LON)));
            }
        }
    }

@@ -19,7 +19,7 @@

package org.elasticsearch.index.mapper.externalvalues;

-import org.apache.lucene.spatial.util.GeoEncodingUtils;
+import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -88,7 +88,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
        if (version.before(Version.V_2_2_0)) {
            assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
        } else {
-            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
+            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0)));
        }

        assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@@ -146,7 +146,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
        if (version.before(Version.V_2_2_0)) {
            assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
        } else {
-            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
+            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0)));
        }

        assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@@ -208,7 +208,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
        if (version.before(Version.V_2_2_0)) {
            assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
        } else {
-            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
+            assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0)));
        }

        assertThat(doc.rootDoc().getField("field.shape"), notNullValue());

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@ -46,7 +47,6 @@ import java.util.List;
import java.util.Map;
import java.lang.NumberFormatException;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -90,7 +90,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (indexCreatedBefore22 == true) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -197,7 +197,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(89.0, 1.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(89.0, 1.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -209,7 +209,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-89.0, -1.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-89.0, -1.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -221,7 +221,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-1.0, -179.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-1.0, -179.0)));
}
}
@ -408,7 +408,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -441,7 +441,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3));
// indexed hash
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
// point field for 2nd value
assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4));
@ -450,7 +450,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5));
// indexed hash
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
} else {
assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
@ -459,14 +459,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
}
}
}
@ -491,7 +491,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -517,7 +517,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -559,12 +559,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
}
}
@ -588,7 +588,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -613,7 +613,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -639,7 +639,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -669,14 +669,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
}
} else {
assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(4));
@ -685,12 +685,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.3));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
assertThat(doc.rootDoc().getFields("point.lat")[2].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lat")[3].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[2].numericValue().doubleValue(), equalTo(1.5));
assertThat(doc.rootDoc().getFields("point.lon")[3].numericValue().doubleValue(), equalTo(1.5));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5)));
}
}
@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
@ -35,7 +36,6 @@ import org.elasticsearch.test.VersionUtils;
import java.util.Collection;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@ -72,7 +72,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -96,7 +96,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3)));
}
}
@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.ip;
import java.net.InetAddress;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.XInetAddressPoint;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.util.BytesRef;
@ -69,11 +68,11 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
ip = "2001:db8::2:1";
String prefix = ip + "/64";
assertEquals(XInetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null));
assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null));
ip = "192.168.1.7";
prefix = ip + "/16";
assertEquals(XInetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null));
assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null));
ft.setIndexOptions(IndexOptions.NONE);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@ -88,7 +87,7 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
assertEquals(
InetAddressPoint.newRangeQuery("field",
InetAddresses.forString("::"),
XInetAddressPoint.MAX_VALUE),
InetAddressPoint.MAX_VALUE),
ft.rangeQuery(null, null, randomBoolean(), randomBoolean()));
assertEquals(
@ -106,13 +105,13 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
assertEquals(
InetAddressPoint.newRangeQuery("field",
InetAddresses.forString("2001:db8::"),
XInetAddressPoint.MAX_VALUE),
InetAddressPoint.MAX_VALUE),
ft.rangeQuery("2001:db8::", null, true, randomBoolean()));
assertEquals(
InetAddressPoint.newRangeQuery("field",
InetAddresses.forString("2001:db8::1"),
XInetAddressPoint.MAX_VALUE),
InetAddressPoint.MAX_VALUE),
ft.rangeQuery("2001:db8::", null, false, randomBoolean()));
assertEquals(
@ -152,7 +151,7 @@ public class IpFieldTypeTests extends FieldTypeTestCase {
assertEquals(
InetAddressPoint.newRangeQuery("field",
InetAddresses.forString("::1:0:0:0"),
XInetAddressPoint.MAX_VALUE),
InetAddressPoint.MAX_VALUE),
// same lo/hi values but inclusive=false so this won't match anything
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true));
@ -22,10 +22,10 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import org.elasticsearch.test.AbstractQueryTestCase;
@ -213,7 +213,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
if (queryBuilder.geoDistance() != null) {
distance = queryBuilder.geoDistance().normalize(distance, DistanceUnit.DEFAULT);
distance = org.elasticsearch.common.geo.GeoUtils.maxRadialDistance(queryBuilder.point(), distance);
assertThat(geoQuery.getRadiusMeters(), closeTo(distance, GeoEncodingUtils.TOLERANCE));
assertThat(geoQuery.getRadiusMeters(), closeTo(distance, GeoUtils.TOLERANCE));
}
}
@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.compress.CompressedXContent;
@ -61,7 +60,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
}
}
GeoPoint point = builder.point();
final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon());
final double maxRadius = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon());
final int fromValueMeters = randomInt((int)(maxRadius*0.5));
final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius);
DistanceUnit fromToUnits = randomFrom(DistanceUnit.values());
@ -100,8 +100,9 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
GeoPointInPolygonQuery geoQuery = (GeoPointInPolygonQuery) query;
assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName()));
List<GeoPoint> queryBuilderPoints = queryBuilder.points();
double[] lats = geoQuery.getLats();
double[] lons = geoQuery.getLons();
assertEquals(1, geoQuery.getPolygons().length);
double[] lats = geoQuery.getPolygons()[0].getPolyLats();
double[] lons = geoQuery.getPolygons()[0].getPolyLons();
assertThat(lats.length, equalTo(queryBuilderPoints.size()));
assertThat(lons.length, equalTo(queryBuilderPoints.size()));
for (int i=0; i < queryBuilderPoints.size(); ++i) {
@ -321,8 +322,9 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
} else {
GeoPointInPolygonQuery q = (GeoPointInPolygonQuery) parsedQuery;
assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME));
final double[] lats = q.getLats();
final double[] lons = q.getLons();
assertEquals(1, q.getPolygons().length);
final double[] lats = q.getPolygons()[0].getPolyLats();
final double[] lons = q.getPolygons()[0].getPolyLons();
assertThat(lats.length, equalTo(4));
assertThat(lons.length, equalTo(4));
assertThat(lats[0], closeTo(40, 1E-5));
@ -61,5 +61,15 @@ public class DummyQueryParserPlugin extends Plugin {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return matchAllDocsQuery.createWeight(searcher, needsScores);
}
@Override
public boolean equals(Object obj) {
return sameClassAs(obj);
}
@Override
public int hashCode() {
return classHash();
}
}
}
@ -31,7 +31,6 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.test.ESTestCase;
@ -74,11 +73,9 @@ public class StoreRecoveryTests extends ESTestCase {
assertEquals(numFiles, targetNumFiles);
assertEquals(indexStats.totalFileCount(), targetNumFiles);
if (hardLinksSupported(createTempDir())) {
assertEquals("upgrade to HardlinkCopyDirectoryWrapper in Lucene 6.1", Version.LATEST, Version.LUCENE_6_0_1);
// assertEquals(indexStats.reusedFileCount(), targetNumFiles); -- uncomment this once upgraded to Lucene 6.1
assertEquals(indexStats.reusedFileCount(), 0);
assertEquals(targetNumFiles, indexStats.reusedFileCount());
} else {
assertEquals(indexStats.reusedFileCount(), 0);
assertEquals(0, indexStats.reusedFileCount(), 0);
}
DirectoryReader reader = DirectoryReader.open(target);
SegmentInfos segmentCommitInfos = SegmentInfos.readLatestCommit(target);
@ -35,6 +35,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesQueryCache;
@ -54,12 +55,12 @@ public class IndicesQueryCacheTests extends ESTestCase {
@Override
public boolean equals(Object obj) {
return super.equals(obj) && id == ((DummyQuery) obj).id;
return sameClassAs(obj) && id == ((DummyQuery) obj).id;
}
@Override
public int hashCode() {
return 31 * super.hashCode() + id;
return 31 * classHash() + id;
}
@Override
@ -93,6 +94,7 @@
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s.setQueryCache(cache);
@ -173,6 +175,7 @@
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
@ -298,6 +301,7 @@
Settings settings = Settings.builder()
.put(IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING.getKey(), 10)
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
IndicesQueryCache cache = new IndicesQueryCache(settings);
s1.setQueryCache(cache);
@ -47,6 +47,7 @@ import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesQueryCache;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.sort.SortOrder;
@ -78,6 +79,7 @@ public class IndexStatsIT extends ESIntegTestCase {
//Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
.put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms")
.put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
.build();
}
@ -126,6 +126,8 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
}
}
// norelease
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-7325")
public void testLimitsBoundingBox() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
@ -30,7 +30,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.spatial.util.GeoProjectionUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkItemResponse;
@ -42,6 +41,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
@ -540,7 +540,7 @@ public class GeoFilterIT extends ESIntegTestCase {
}
public static double distance(double lat1, double lon1, double lat2, double lon2) {
return GeoProjectionUtils.SEMIMAJOR_AXIS * DistanceUtils.distHaversineRAD(
return GeoUtils.EARTH_SEMI_MAJOR_AXIS * DistanceUtils.distHaversineRAD(
DistanceUtils.toRadians(lat1),
DistanceUtils.toRadians(lon1),
DistanceUtils.toRadians(lat2),
@ -318,13 +318,7 @@ public class SimpleNestedIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
Explanation explanation = searchResponse.getHits().hits()[0].explanation();
assertThat(explanation.getValue(), equalTo(2f));
assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on child doc range from 0 to 1\n"));
// TODO: Enable when changes from BlockJoinQuery#explain are added to Lucene (Most likely version 4.2)
// assertThat(explanation.getDetails().length, equalTo(2));
// assertThat(explanation.getDetails()[0].getValue(), equalTo(1f));
// assertThat(explanation.getDetails()[0].getDescription(), equalTo("Child[0]"));
// assertThat(explanation.getDetails()[1].getValue(), equalTo(1f));
// assertThat(explanation.getDetails()[1].getDescription(), equalTo("Child[1]"));
assertThat(explanation.toString(), startsWith("2.0 = sum of:\n 2.0 = Score based on 2 child docs in range from 0 to 1"));
}
public void testSimpleNestedSorting() throws Exception {
@ -1 +0,0 @@
53953c1a9b097f83209c84a422cf8f9d271f47c1
@ -0,0 +1 @@
bf73c03e6b83f8e696133f40b9b1fc3381750149
@ -1 +0,0 @@
3647088603be84b8f4916ef86954e3336b98d254
@ -0,0 +1 @@
8bc384f55faf99b6d6cee6f34df4fbd3145afb4d
@ -1 +0,0 @@
40ccd40bec54266a10aa1f81c565914ede8c0ca0
@ -0,0 +1 @@
fe19e7558440e10db4bd7150931dff6a7cf73243
@ -1 +0,0 @@
010daaae60227fbe719ca95e9b6fcdb5c38d4eba
@ -0,0 +1 @@
c3f0de4cdd185d23bce66c580d9c12adb98182a5
@ -1 +0,0 @@
65d74c3642e6a86ba905045473b17cc84826527e
@ -0,0 +1 @@
ffb7087267bb6076b00c90f97ee36ebe23ea0662
@ -1 +0,0 @@
2105e2826ce93d1f764e5a0a3afa9ee461d556c1
@ -0,0 +1 @@
d071ad17bed58b3267f6fa0b2a8211f8fe18c912
@ -1 +0,0 @@
e2cde0688e487a27d08df0c2d81d492b1f4cdc2a
@ -0,0 +1 @@
f5e9b6eefe580a7f65276aca3192ca5796332509
@ -1 +0,0 @@
e6e59996fc324319d695e41cf25e30e5f1e4c182
@ -0,0 +1 @@
6b84a79c37b01197130cceb65e5573794f073df1
@ -1 +0,0 @@
09b0e5862a676ff9e55a1bc6ca37ad578a25cb38
@ -0,0 +1 @@
a9d51b77395dfdd7e6c4cf8c8506ebca5e1bb374
@ -1 +0,0 @@
24f7ba0707aa01be2dd7749adff1659262be8f33
@ -0,0 +1 @@
e322f004e574df119ba08dd8751a743422a46724
@ -1 +0,0 @@
0faf4c0d7e0adb6fccd830a2d5797d4176b579fe
@ -0,0 +1 @@
c7cb119652c906adcdf7fe64445c76d057329d63
@ -1 +0,0 @@
4d94d006251c904de3f1503c64746400877d6fa3
@ -0,0 +1 @@
ca6c17fe31884e968ae63fd475ce6532b767c7fa
@ -1 +0,0 @@
3de19dbdb889fe87791dae291ac3b340586854c4
@ -0,0 +1 @@
49235405e40757474aaa9e8e54946b67fe2a01d9
@ -1 +0,0 @@
5b1b7a754e83e2d58a819afa279b20b08b48c9c1
@ -0,0 +1 @@
39f6b29c428327860c1a342bd57800e79ad92ef5
@ -1 +0,0 @@
55886bdaf16ecc6948e94b527837eaa1f16fe988
@ -0,0 +1 @@
4e9f713d34fd4208bf308ac59132216f96521f13
@ -1 +0,0 @@
2b76056dbd40fb51dc5e8ef71e1919ad23e635a1
@ -0,0 +1 @@
e5a4b673918f448006c0531799706abebe9a1db0
@ -23,6 +23,7 @@ package org.elasticsearch.messy.tests;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
@ -41,7 +42,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Random;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
@ -218,7 +218,7 @@ public class SimpleSortTests extends ESIntegTestCase {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20L));
for (int i = 0; i < 10; i++) {
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, TOLERANCE));
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo(i, GeoUtils.TOLERANCE));
}
}
@ -243,7 +243,7 @@ public final class PercolateQuery extends Query implements Accountable {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
if (sameClassAs(o) == false) return false;
PercolateQuery that = (PercolateQuery) o;
@ -254,7 +254,7 @@
@Override
public int hashCode() {
int result = super.hashCode();
int result = classHash();
result = 31 * result + documentType.hashCode();
result = 31 * result + documentSource.hashCode();
return result;
@ -379,6 +379,16 @@ public class PercolateQueryTests extends ESTestCase {
public String toString(String field) {
return "custom{" + field + "}";
}
@Override
public boolean equals(Object obj) {
return sameClassAs(obj);
}
@Override
public int hashCode() {
return classHash();
}
}
}
@ -1 +0,0 @@
da08d9919f54efd2e09968d49fe05f6ce3f0c7ce
@ -0,0 +1 @@
9cd8cbea5baef18a36bee86846a9ba026d2a02e0
@ -1 +0,0 @@
77905f563c47994a764a6ab3d5ec198c174567a7
@ -0,0 +1 @@
86c6d6a367ed658351bd8c8828d6ed647ac79b7e
@ -1 +0,0 @@
3ee5d909c269e5da7a92715f41ead88943b38123
@ -0,0 +1 @@
6553bf764a69cd15e4fe1e55661382872795b853
@ -1 +0,0 @@
3b7bdbf9efa84f8d8875bd7f1d8734276930b9c3
@ -0,0 +1 @@
979817950bc806400d8fa12a609ef215b5bdebd6
@ -1 +0,0 @@
e80e912621276e1009b72c06d5def188976c5426