Merge branch 'master' into docs/multicast
commit fdf8e67de3
@@ -20,7 +20,6 @@
package org.apache.lucene.queryparser.classic;

import com.carrotsearch.hppc.ObjectFloatHashMap;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;

@@ -28,7 +27,6 @@ import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.unit.Fuzziness;
import org.joda.time.DateTimeZone;

import java.util.Collection;
import java.util.List;
import java.util.Locale;

@@ -69,16 +67,10 @@ public class QueryParserSettings {
    private DateTimeZone timeZone;

    List<String> fields = null;
    Collection<String> queryTypes = null;
    ObjectFloatHashMap<String> boosts = null;
    float tieBreaker = 0.0f;
    boolean useDisMax = true;

    public boolean isCacheable() {
        // a hack for now :) to determine if a query string is cacheable
        return !queryString.contains("now");
    }

    public String queryString() {
        return queryString;
    }

@@ -271,14 +263,6 @@
        this.fields = fields;
    }

    public Collection<String> queryTypes() {
        return queryTypes;
    }

    public void queryTypes(Collection<String> queryTypes) {
        this.queryTypes = queryTypes;
    }

    public ObjectFloatHashMap<String> boosts() {
        return boosts;
    }

@@ -371,7 +355,6 @@
        if (useDisMax != that.useDisMax) return false;
        if (boosts != null ? !boosts.equals(that.boosts) : that.boosts != null) return false;
        if (fields != null ? !fields.equals(that.fields) : that.fields != null) return false;
        if (queryTypes != null ? !queryTypes.equals(that.queryTypes) : that.queryTypes != null) return false;

        return true;
    }

@@ -398,7 +381,6 @@
        result = 31 * result + (analyzeWildcard ? 1 : 0);

        result = 31 * result + (fields != null ? fields.hashCode() : 0);
        result = 31 * result + (queryTypes != null ? queryTypes.hashCode() : 0);
        result = 31 * result + (boosts != null ? boosts.hashCode() : 0);
        result = 31 * result + (tieBreaker != +0.0f ? Float.floatToIntBits(tieBreaker) : 0);
        result = 31 * result + (useDisMax ? 1 : 0);

@@ -197,7 +197,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
            valid = false;
            error = e.getMessage();
        } finally {
            SearchContext.current().close();
            searchContext.close();
            SearchContext.removeCurrent();
        }

@@ -174,9 +174,8 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques
        }
        context.preProcess();
        try {
            Lucene.EarlyTerminatingCollector existsCollector = Lucene.createExistsCollector();
            Lucene.exists(context.searcher(), context.query(), existsCollector);
            return new ShardExistsResponse(request.shardId(), existsCollector.exists());
            boolean exists = Lucene.exists(context, context.query(), Lucene.createExistsCollector());
            return new ShardExistsResponse(request.shardId(), exists);
        } catch (Exception e) {
            throw new QueryPhaseExecutionException(context, "failed to execute exists", e);
        }

@@ -47,7 +47,6 @@ import java.util.concurrent.atomic.AtomicReferenceArray;
public abstract class TransportBroadcastAction<Request extends BroadcastRequest, Response extends BroadcastResponse, ShardRequest extends BroadcastShardRequest, ShardResponse extends BroadcastShardResponse>
        extends HandledTransportAction<Request, Response> {

    protected final ThreadPool threadPool;
    protected final ClusterService clusterService;
    protected final TransportService transportService;

@@ -59,7 +58,6 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest,
        super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request);
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.threadPool = threadPool;
        this.transportShardAction = actionName + "[s]";

        transportService.registerRequestHandler(transportShardAction, shardRequest, shardExecutor, new ShardTransportHandler());

@@ -46,7 +46,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A
    protected TimeValue timeout = DEFAULT_TIMEOUT;
    protected String index;

    private boolean threadedOperation = true;
    private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;
    private volatile boolean canHaveDuplicates = false;

@@ -76,7 +75,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A
        super(originalRequest);
        this.timeout = request.timeout();
        this.index = request.index();
        this.threadedOperation = request.operationThreaded();
        this.consistencyLevel = request.consistencyLevel();
    }

@@ -91,23 +89,6 @@ public abstract class ReplicationRequest<T extends ReplicationRequest> extends A
        return canHaveDuplicates;
    }

    /**
     * Controls if the operation will be executed on a separate thread when executed locally.
     */
    public final boolean operationThreaded() {
        return threadedOperation;
    }

    /**
     * Controls if the operation will be executed on a separate thread when executed locally. Defaults
     * to <tt>true</tt> when running in embedded mode.
     */
    @SuppressWarnings("unchecked")
    public final T operationThreaded(boolean threadedOperation) {
        this.threadedOperation = threadedOperation;
        return (T) this;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     */

@@ -35,16 +35,6 @@ public abstract class ReplicationRequestBuilder<Request extends ReplicationReque
        super(client, action, request);
    }

    /**
     * Controls if the operation will be executed on a separate thread when executed locally. Defaults
     * to <tt>true</tt> when running in embedded mode.
     */
    @SuppressWarnings("unchecked")
    public final RequestBuilder setOperationThreaded(boolean threadedOperation) {
        request.operationThreaded(threadedOperation);
        return (RequestBuilder) this;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     */

@@ -211,8 +211,6 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
    class OperationTransportHandler implements TransportRequestHandler<Request> {
        @Override
        public void messageReceived(final Request request, final TransportChannel channel) throws Exception {
            // if we have a local operation, execute it on a thread since we don't spawn
            request.operationThreaded(true);
            execute(request, new ActionListener<Response>() {
                @Override
                public void onResponse(Response result) {

@@ -440,21 +438,17 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
        protected void routeRequestOrPerformLocally(final ShardRouting primary, final ShardIterator shardsIt) {
            if (primary.currentNodeId().equals(observer.observedState().nodes().localNodeId())) {
                try {
                    if (internalRequest.request().operationThreaded()) {
                        threadPool.executor(executor).execute(new AbstractRunnable() {
                            @Override
                            public void onFailure(Throwable t) {
                                finishAsFailed(t);
                            }
                    threadPool.executor(executor).execute(new AbstractRunnable() {
                        @Override
                        public void onFailure(Throwable t) {
                            finishAsFailed(t);
                        }

                            @Override
                            protected void doRun() throws Exception {
                                performOnPrimary(primary, shardsIt);
                            }
                        });
                    } else {
                        performOnPrimary(primary, shardsIt);
                    }
                        @Override
                        protected void doRun() throws Exception {
                            performOnPrimary(primary, shardsIt);
                        }
                    });
                } catch (Throwable t) {
                    finishAsFailed(t);
                }

@@ -506,9 +500,6 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
                finishAsFailed(failure);
                return;
            }
            // make it threaded operation so we fork on the discovery listener thread
            internalRequest.request().operationThreaded(true);

            observer.waitForNextChange(new ClusterStateObserver.Listener() {
                @Override
                public void onNewClusterState(ClusterState state) {

@@ -904,43 +895,33 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ

                });
            } else {
                if (replicaRequest.operationThreaded()) {
                    try {
                        threadPool.executor(executor).execute(new AbstractRunnable() {
                            @Override
                            protected void doRun() {
                                try {
                                    shardOperationOnReplica(shard.shardId(), replicaRequest);
                                    onReplicaSuccess();
                                } catch (Throwable e) {
                                    onReplicaFailure(nodeId, e);
                                    failReplicaIfNeeded(shard.index(), shard.id(), e);
                                }
                try {
                    threadPool.executor(executor).execute(new AbstractRunnable() {
                        @Override
                        protected void doRun() {
                            try {
                                shardOperationOnReplica(shard.shardId(), replicaRequest);
                                onReplicaSuccess();
                            } catch (Throwable e) {
                                onReplicaFailure(nodeId, e);
                                failReplicaIfNeeded(shard.index(), shard.id(), e);
                            }
                        }

                            // we must never reject on because of thread pool capacity on replicas
                            @Override
                            public boolean isForceExecution() {
                                return true;
                            }
                        // we must never reject on because of thread pool capacity on replicas
                        @Override
                        public boolean isForceExecution() {
                            return true;
                        }

                        @Override
                        public void onFailure(Throwable t) {
                            onReplicaFailure(nodeId, t);
                        }
                    });
                } catch (Throwable e) {
                    failReplicaIfNeeded(shard.index(), shard.id(), e);
                    onReplicaFailure(nodeId, e);
                }
                } else {
                    try {
                        shardOperationOnReplica(shard.shardId(), replicaRequest);
                        onReplicaSuccess();
                    } catch (Throwable e) {
                        failReplicaIfNeeded(shard.index(), shard.id(), e);
                        onReplicaFailure(nodeId, e);
                    }
                        @Override
                        public void onFailure(Throwable t) {
                            onReplicaFailure(nodeId, t);
                        }
                    });
                } catch (Throwable e) {
                    failReplicaIfNeeded(shard.index(), shard.id(), e);
                    onReplicaFailure(nodeId, e);
                }
            }
        }

@@ -131,7 +131,6 @@ public class UpdateHelper extends AbstractComponent {
                    .routing(request.routing())
                    .parent(request.parent())
                    .consistencyLevel(request.consistencyLevel());
            indexRequest.operationThreaded(false);
            if (request.versionType() != VersionType.INTERNAL) {
                // in all but the internal versioning mode, we want to create the new document using the given version.
                indexRequest.version(request.version()).versionType(request.versionType());

@@ -227,13 +226,11 @@ public class UpdateHelper extends AbstractComponent {
                    .consistencyLevel(request.consistencyLevel())
                    .timestamp(timestamp).ttl(ttl)
                    .refresh(request.refresh());
            indexRequest.operationThreaded(false);
            return new Result(indexRequest, Operation.INDEX, updatedSourceAsMap, updateSourceContentType);
        } else if ("delete".equals(operation)) {
            DeleteRequest deleteRequest = Requests.deleteRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent)
                    .version(updateVersion).versionType(request.versionType())
                    .consistencyLevel(request.consistencyLevel());
            deleteRequest.operationThreaded(false);
            return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType);
        } else if ("none".equals(operation)) {
            UpdateResponse update = new UpdateResponse(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), false);

@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing;

import com.carrotsearch.hppc.IntSet;
import com.google.common.base.Predicate;
import com.google.common.collect.*;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -27,7 +28,6 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.shard.ShardId;

import java.io.IOException;
import java.util.ArrayList;

@@ -162,28 +162,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
     * iterator contains a single ShardRouting pointing at the relocating target
     */
    public GroupShardsIterator allActiveShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>();
        for (String index : indices) {
            IndexRoutingTable indexRoutingTable = index(index);
            if (indexRoutingTable == null) {
                continue;
                // we simply ignore indices that don't exists (make sense for operations that use it currently)
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    if (shardRouting.active()) {
                        set.add(shardRouting.shardsIt());
                        if (includeRelocationTargets && shardRouting.relocating()) {
                            set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard())));
                        }
                    } else if (includeEmpty) { // we need this for counting properly, just make it an empty one
                        set.add(new PlainShardIterator(shardRouting.shardId(), Collections.<ShardRouting>emptyList()));
                    }
                }
            }
        }
        return new GroupShardsIterator(set);
        return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ACTIVE_PREDICATE);
    }

    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty) {

@@ -198,6 +177,25 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
     * iterator contains a single ShardRouting pointing at the relocating target
     */
    public GroupShardsIterator allAssignedShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets) {
        return allSatisfyingPredicateShardsGrouped(indices, includeEmpty, includeRelocationTargets, ASSIGNED_PREDICATE);
    }

    private static Predicate<ShardRouting> ACTIVE_PREDICATE = new Predicate<ShardRouting>() {
        @Override
        public boolean apply(ShardRouting shardRouting) {
            return shardRouting.active();
        }
    };

    private static Predicate<ShardRouting> ASSIGNED_PREDICATE = new Predicate<ShardRouting>() {
        @Override
        public boolean apply(ShardRouting shardRouting) {
            return shardRouting.assignedToNode();
        }
    };

    // TODO: replace with JDK 8 native java.util.function.Predicate
    private GroupShardsIterator allSatisfyingPredicateShardsGrouped(String[] indices, boolean includeEmpty, boolean includeRelocationTargets, Predicate<ShardRouting> predicate) {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>();
        for (String index : indices) {

@@ -208,7 +206,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
            }
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    if (shardRouting.assignedToNode()) {
                    if (predicate.apply(shardRouting)) {
                        set.add(shardRouting.shardsIt());
                        if (includeRelocationTargets && shardRouting.relocating()) {
                            set.add(new PlainShardIterator(shardRouting.shardId(), Collections.singletonList(shardRouting.buildTargetRelocatingShard())));
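The RoutingTable refactor above collapses two near-identical grouping loops into one method parameterized by a Guava Predicate. A minimal standalone sketch of that extraction pattern (hypothetical names; only the Predicate import mirrors the commit):

    import com.google.common.base.Predicate;
    import java.util.ArrayList;
    import java.util.List;

    class PredicateGroupingSketch {
        // Same shape as allSatisfyingPredicateShardsGrouped: one loop, the
        // caller decides the filtering rule (active vs. assigned).
        static <T> List<T> select(Iterable<T> items, Predicate<T> predicate) {
            List<T> selected = new ArrayList<>();   // a list, to preserve duplicates/identity like the routing code
            for (T item : items) {
                if (predicate.apply(item)) {
                    selected.add(item);
                }
            }
            return selected;
        }
    }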
@@ -20,46 +20,14 @@
package org.elasticsearch.common.lucene;

import com.google.common.collect.Iterables;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;

@@ -75,14 +43,11 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.*;

import static org.elasticsearch.common.lucene.search.NoopCollector.NOOP_COLLECTOR;

@@ -379,6 +344,24 @@ public class Lucene {
        return false;
    }

    /**
     * Performs an exists (count > 0) query on the searcher from the <code>searchContext</code> for <code>query</code>
     * using the given <code>collector</code>
     *
     * The <code>collector</code> can be instantiated using <code>Lucene.createExistsCollector()</code>
     */
    public static boolean exists(SearchContext searchContext, Query query, EarlyTerminatingCollector collector) throws IOException {
        collector.reset();
        try {
            searchContext.searcher().search(query, collector);
        } catch (EarlyTerminationException e) {
            // ignore, just early termination...
        } finally {
            searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION);
        }
        return collector.exists();
    }

    /**
     * Creates an {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector}
     * with a threshold of <code>1</code>
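The caller side of the new SearchContext-based exists(...) overload appears in the TransportExistsAction hunk earlier in this commit; condensed, the call shape is (the SearchContext named context is assumed to be set up and pre-processed already):

    // Sketch of the call shape introduced by this commit.
    boolean exists = Lucene.exists(context, context.query(), Lucene.createExistsCollector());
    // createExistsCollector() returns an EarlyTerminatingCollector with a threshold of 1,
    // so the search stops at the first matching document, and the overload now clears
    // COLLECTION-lifetime releasables itself in a finally block.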
@@ -20,6 +20,7 @@
package org.elasticsearch.common.settings.loader;

import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.io.stream.StreamInput;

@@ -36,7 +37,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {

    @Override
    public Map<String, String> load(String source) throws IOException {
        Properties props = new Properties();
        Properties props = new NoDuplicatesProperties();
        FastStringReader reader = new FastStringReader(source);
        try {
            props.load(reader);

@@ -52,7 +53,7 @@ public class PropertiesSettingsLoader implements SettingsLoader {

    @Override
    public Map<String, String> load(byte[] source) throws IOException {
        Properties props = new Properties();
        Properties props = new NoDuplicatesProperties();
        StreamInput stream = StreamInput.wrap(source);
        try {
            props.load(stream);

@@ -65,4 +66,15 @@ public class PropertiesSettingsLoader implements SettingsLoader {
            IOUtils.closeWhileHandlingException(stream);
        }
    }

    class NoDuplicatesProperties extends Properties {
        @Override
        public synchronized Object put(Object key, Object value) {
            Object previousValue = super.put(key, value);
            if (previousValue != null) {
                throw new ElasticsearchParseException("duplicate settings key [{}] found, previous value [{}], current value [{}]", key, previousValue, value);
            }
            return previousValue;
        }
    }
}
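The NoDuplicatesProperties trick works because java.util.Properties.load(...) funnels every parsed pair through put(...). A standalone plain-JDK sketch (invented names, generic exception in place of ElasticsearchParseException):

    import java.io.StringReader;
    import java.util.Properties;

    class NoDuplicatesDemo {
        static class StrictProperties extends Properties {
            @Override
            public synchronized Object put(Object key, Object value) {
                Object previous = super.put(key, value);
                if (previous != null) {
                    throw new IllegalStateException("duplicate key [" + key + "]: [" + previous + "] vs [" + value + "]");
                }
                return previous;
            }
        }

        public static void main(String[] args) throws Exception {
            StrictProperties props = new StrictProperties();
            props.load(new StringReader("a=1\nb=2\na=3\n"));   // throws on the second "a"
        }
    }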
@@ -20,7 +20,6 @@
package org.elasticsearch.common.settings.loader;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

@@ -141,7 +140,18 @@ public abstract class XContentSettingsLoader implements SettingsLoader {
            sb.append(pathEle).append('.');
        }
        sb.append(fieldName);
        settings.put(sb.toString(), parser.text());
        String key = sb.toString();
        String currentValue = parser.text();
        String previousValue = settings.put(key, currentValue);
        if (previousValue != null) {
            throw new ElasticsearchParseException(
                    "duplicate settings key [{}] found at line number [{}], column number [{}], previous value [{}], current value [{}]",
                    key,
                    parser.getTokenLocation().lineNumber,
                    parser.getTokenLocation().columnNumber,
                    previousValue,
                    currentValue
            );
        }
    }

@@ -29,6 +29,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.indices.analysis.IndicesAnalysisService;

@@ -215,19 +216,38 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable

        Map<String, NamedAnalyzer> analyzers = newHashMap();
        for (AnalyzerProvider analyzerFactory : analyzerProviders.values()) {
            /*
             * Lucene defaults positionIncrementGap to 0 in all analyzers but
             * Elasticsearch defaults them to 0 only before version 2.0
             * and 100 afterwards so we override the positionIncrementGap if it
             * doesn't match here.
             */
            int overridePositionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(Version.indexCreated(indexSettings));
            if (analyzerFactory instanceof CustomAnalyzerProvider) {
                ((CustomAnalyzerProvider) analyzerFactory).build(this);
                /*
                 * Custom analyzers already default to the correct, version
                 * dependent positionIncrementGap and the user is be able to
                 * configure the positionIncrementGap directly on the analyzer so
                 * we disable overriding the positionIncrementGap to preserve the
                 * user's setting.
                 */
                overridePositionIncrementGap = Integer.MIN_VALUE;
            }
            Analyzer analyzerF = analyzerFactory.get();
            if (analyzerF == null) {
                throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
            }
            NamedAnalyzer analyzer;
            // if we got a named analyzer back, use it...
            if (analyzerF instanceof NamedAnalyzer) {
                // if we got a named analyzer back, use it...
                analyzer = (NamedAnalyzer) analyzerF;
                if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
                    // unless the positionIncrementGap needs to be overridden
                    analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
                }
            } else {
                analyzer = new NamedAnalyzer(analyzerFactory.name(), analyzerFactory.scope(), analyzerF);
                analyzer = new NamedAnalyzer(analyzerFactory.name(), analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
            }
            analyzers.put(analyzerFactory.name(), analyzer);
            analyzers.put(Strings.toCamelCase(analyzerFactory.name()), analyzer);

@@ -44,11 +44,11 @@ public final class CustomAnalyzer extends Analyzer {
    }

    public CustomAnalyzer(TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, TokenFilterFactory[] tokenFilters,
                          int positionOffsetGap, int offsetGap) {
                          int positionIncrementGap, int offsetGap) {
        this.tokenizerFactory = tokenizerFactory;
        this.charFilters = charFilters;
        this.tokenFilters = tokenFilters;
        this.positionIncrementGap = positionOffsetGap;
        this.positionIncrementGap = positionIncrementGap;
        this.offsetGap = offsetGap;
    }

@@ -19,10 +19,12 @@

package org.elasticsearch.index.analysis;

import org.elasticsearch.Version;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;

import java.util.List;

@@ -77,13 +79,28 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
            tokenFilters.add(tokenFilter);
        }

        int positionOffsetGap = analyzerSettings.getAsInt("position_offset_gap", 0);
        int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);
        int positionIncrementGap = StringFieldMapper.Defaults.positionIncrementGap(Version.indexCreated(indexSettings));

        if (analyzerSettings.getAsMap().containsKey("position_offset_gap")){
            if (Version.indexCreated(indexSettings).before(Version.V_2_0_0)){
                if (analyzerSettings.getAsMap().containsKey("position_increment_gap")){
                    throw new IllegalArgumentException("Custom Analyzer [" + name() +
                            "] defined both [position_offset_gap] and [position_increment_gap], use only [position_increment_gap]");
                }
                positionIncrementGap = analyzerSettings.getAsInt("position_offset_gap", positionIncrementGap);
            }else {
                throw new IllegalArgumentException("Option [position_offset_gap] in Custom Analyzer [" + name() +
                        "] has been renamed, please use [position_increment_gap] instead.");
            }
        }

        positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap);

        int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);;
        this.customAnalyzer = new CustomAnalyzer(tokenizer,
                charFilters.toArray(new CharFilterFactory[charFilters.size()]),
                tokenFilters.toArray(new TokenFilterFactory[tokenFilters.size()]),
                positionOffsetGap,
                positionIncrementGap,
                offsetGap
        );
    }
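The settings-resolution rules in the CustomAnalyzerProvider hunk can be summarized in a detached sketch (hypothetical names; isPre2x stands in for Version.indexCreated(indexSettings).before(Version.V_2_0_0)):

    import java.util.Map;

    class GapResolutionSketch {
        static int resolveGap(Map<String, String> settings, boolean isPre2x, int versionDefault) {
            int gap = versionDefault;   // version-dependent default: 0 pre-2.0, 100 afterwards
            if (settings.containsKey("position_offset_gap")) {
                if (!isPre2x) {   // indices created on/after 2.0 must use the new name
                    throw new IllegalArgumentException("[position_offset_gap] has been renamed, use [position_increment_gap]");
                }
                if (settings.containsKey("position_increment_gap")) {
                    throw new IllegalArgumentException("defined both options, use only [position_increment_gap]");
                }
                gap = Integer.parseInt(settings.get("position_offset_gap"));
            }
            if (settings.containsKey("position_increment_gap")) {
                gap = Integer.parseInt(settings.get("position_increment_gap"));
            }
            return gap;
        }
    }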
@@ -31,10 +31,10 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper {
    private final String name;
    private final AnalyzerScope scope;
    private final Analyzer analyzer;
    private final int positionOffsetGap;
    private final int positionIncrementGap;

    public NamedAnalyzer(NamedAnalyzer analyzer, int positionOffsetGap) {
        this(analyzer.name(), analyzer.scope(), analyzer.analyzer(), positionOffsetGap);
    public NamedAnalyzer(NamedAnalyzer analyzer, int positionIncrementGap) {
        this(analyzer.name(), analyzer.scope(), analyzer.analyzer(), positionIncrementGap);
    }

    public NamedAnalyzer(String name, Analyzer analyzer) {

@@ -45,12 +45,12 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper {
        this(name, scope, analyzer, Integer.MIN_VALUE);
    }

    public NamedAnalyzer(String name, AnalyzerScope scope, Analyzer analyzer, int positionOffsetGap) {
    public NamedAnalyzer(String name, AnalyzerScope scope, Analyzer analyzer, int positionIncrementGap) {
        super(ERROR_STRATEGY);
        this.name = name;
        this.scope = scope;
        this.analyzer = analyzer;
        this.positionOffsetGap = positionOffsetGap;
        this.positionIncrementGap = positionIncrementGap;
    }

    /**

@@ -81,8 +81,8 @@ public class NamedAnalyzer extends DelegatingAnalyzerWrapper {

    @Override
    public int getPositionIncrementGap(String fieldName) {
        if (positionOffsetGap != Integer.MIN_VALUE) {
            return positionOffsetGap;
        if (positionIncrementGap != Integer.MIN_VALUE) {
            return positionIncrementGap;
        }
        return super.getPositionIncrementGap(fieldName);
    }

@@ -100,33 +100,36 @@ class DocumentParser implements Closeable {
        context.reset(parser, new ParseContext.Document(), source);

        // will result in START_OBJECT
        int countDownTokens = 0;
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.START_OBJECT) {
            throw new MapperParsingException("Malformed content, must start with an object");
        }
        boolean emptyDoc = false;
        token = parser.nextToken();
        if (token == XContentParser.Token.END_OBJECT) {
            // empty doc, we can handle it...
            emptyDoc = true;
        } else if (token != XContentParser.Token.FIELD_NAME) {
            throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
        }

        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.preParse(context);
        }

        if (!emptyDoc) {
            Mapper update = parseObject(context, mapping.root);
            if (update != null) {
                context.addDynamicMappingsUpdate(update);
        if (mapping.root.isEnabled()) {
            boolean emptyDoc = false;
            token = parser.nextToken();
            if (token == XContentParser.Token.END_OBJECT) {
                // empty doc, we can handle it...
                emptyDoc = true;
            } else if (token != XContentParser.Token.FIELD_NAME) {
                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
            }
        }

        for (int i = 0; i < countDownTokens; i++) {
            parser.nextToken();
            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
                metadataMapper.preParse(context);
            }
            if (emptyDoc == false) {
                Mapper update = parseObject(context, mapping.root);
                if (update != null) {
                    context.addDynamicMappingsUpdate(update);
                }
            }
            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
                metadataMapper.postParse(context);
            }

        } else {
            // entire type is disabled
            parser.skipChildren();
        }

        // try to parse the next token, this should be null if the object is ended properly

@@ -135,12 +138,11 @@ class DocumentParser implements Closeable {
                && source.parser() == null && parser != null) {
            // only check for end of tokens if we created the parser here
            token = parser.nextToken();
            assert token == null; // double check, in tests, that we didn't end parsing early
            if (token != null) {
                throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
            }
        }

        for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
            metadataMapper.postParse(context);
        }
    } catch (Throwable e) {
        // if its already a mapper parsing exception, no need to wrap it...
        if (e instanceof MapperParsingException) {
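The new mapping.root.isEnabled() branch leans on the pull parser's ability to drop a whole subtree via skipChildren(). A standalone sketch with plain Jackson, whose streaming API XContentParser wraps for JSON (illustrative field names):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;

    class SkipChildrenDemo {
        public static void main(String[] args) throws Exception {
            JsonParser parser = new JsonFactory().createParser("{\"disabled\":{\"b\":1,\"c\":[2,3]},\"kept\":true}");
            System.out.println(parser.nextToken());   // START_OBJECT (root)
            System.out.println(parser.nextToken());   // FIELD_NAME "disabled"
            System.out.println(parser.nextToken());   // START_OBJECT of "disabled"
            parser.skipChildren();                    // consume up to the matching END_OBJECT without visiting b/c
            System.out.println(parser.nextToken());   // FIELD_NAME "kept" -- parsing resumes after the subtree
        }
    }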
@@ -24,6 +24,7 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -52,6 +53,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
public class StringFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll {

    public static final String CONTENT_TYPE = "string";
    private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;

    public static class Defaults {
        public static final MappedFieldType FIELD_TYPE = new StringFieldType();

@@ -62,15 +64,38 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc

        // NOTE, when adding defaults here, make sure you add them in the builder
        public static final String NULL_VALUE = null;
        public static final int POSITION_OFFSET_GAP = 0;

        /**
         * Post 2.0 default for position_increment_gap. Set to 100 so that
         * phrase queries of reasonably high slop will not match across field
         * values.
         */
        public static final int POSITION_INCREMENT_GAP = 100;
        public static final int POSITION_INCREMENT_GAP_PRE_2_0 = 0;

        public static final int IGNORE_ABOVE = -1;

        /**
         * The default position_increment_gap for a particular version of Elasticsearch.
         */
        public static int positionIncrementGap(Version version) {
            if (version.before(Version.V_2_0_0_beta1)) {
                return POSITION_INCREMENT_GAP_PRE_2_0;
            }
            return POSITION_INCREMENT_GAP;
        }
    }

    public static class Builder extends FieldMapper.Builder<Builder, StringFieldMapper> {

        protected String nullValue = Defaults.NULL_VALUE;

        protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP;
        /**
         * The distance between tokens from different values in the same field.
         * POSITION_INCREMENT_GAP_USE_ANALYZER means default to the analyzer's
         * setting which in turn defaults to Defaults.POSITION_INCREMENT_GAP.
         */
        protected int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;

        protected int ignoreAbove = Defaults.IGNORE_ABOVE;

@@ -85,8 +110,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
            return this;
        }

        public Builder positionOffsetGap(int positionOffsetGap) {
            this.positionOffsetGap = positionOffsetGap;
        public Builder positionIncrementGap(int positionIncrementGap) {
            this.positionIncrementGap = positionIncrementGap;
            return this;
        }

@@ -102,10 +127,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc

        @Override
        public StringFieldMapper build(BuilderContext context) {
            if (positionOffsetGap > 0) {
                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionOffsetGap));
                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionOffsetGap));
                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionOffsetGap));
            if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
                fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap));
                fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap));
                fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
            }
            // if the field is not analyzed, then by default, we should omit norms and have docs only
            // index options, as probably what the user really wants

@@ -124,7 +149,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
            }
            setupFieldType(context);
            StringFieldMapper fieldMapper = new StringFieldMapper(
                    name, fieldType, defaultFieldType, positionOffsetGap, ignoreAbove,
                    name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;

@@ -153,10 +178,15 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
            }
            builder.searchQuotedAnalyzer(analyzer);
            iterator.remove();
        } else if (propName.equals("position_offset_gap")) {
            builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1));
        } else if (propName.equals("position_increment_gap") ||
                parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) {
            int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
            if (newPositionIncrementGap < 0) {
                throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
            }
            builder.positionIncrementGap(newPositionIncrementGap);
            // we need to update to actual analyzers if they are not set in this case...
            // so we can inject the position offset gap...
            // so we can inject the position increment gap...
            if (builder.fieldType().indexAnalyzer() == null) {
                builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
            }

@@ -213,17 +243,17 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
    }

    private Boolean includeInAll;
    private int positionOffsetGap;
    private int positionIncrementGap;
    private int ignoreAbove;

    protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                int positionOffsetGap, int ignoreAbove,
                                int positionIncrementGap, int ignoreAbove,
                                Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
            throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values");
        }
        this.positionOffsetGap = positionOffsetGap;
        this.positionIncrementGap = positionIncrementGap;
        this.ignoreAbove = ignoreAbove;
    }

@@ -251,8 +281,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
        return true;
    }

    public int getPositionOffsetGap() {
        return this.positionOffsetGap;
    public int getPositionIncrementGap() {
        return this.positionIncrementGap;
    }

    public int getIgnoreAbove() {

@@ -354,8 +384,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
            builder.field("include_in_all", false);
        }

        if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) {
            builder.field("position_offset_gap", positionOffsetGap);
        if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
            builder.field("position_increment_gap", positionIncrementGap);
        }
        NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer();
        if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) {
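The Defaults javadoc above motivates the post-2.0 gap of 100: consecutive values of a multi-valued field are pushed 100 positions apart, so sloppy phrase queries cannot match across values. A toy position calculation (no Lucene dependency, invented data) makes the effect visible:

    class PositionGapDemo {
        public static void main(String[] args) {
            for (int gap : new int[]{0, 100}) {
                int nextPos = 0;
                StringBuilder out = new StringBuilder("gap=" + gap + ":");
                for (String value : new String[]{"big apple", "fresh fruit"}) {
                    for (String token : value.split(" ")) {
                        out.append(' ').append(token).append('@').append(nextPos++);
                    }
                    nextPos += gap;   // the position increment gap is inserted between values
                }
                System.out.println(out);
                // gap=0:   big@0 apple@1 fresh@2 fruit@3    -> "apple fresh" looks like an adjacent phrase
                // gap=100: big@0 apple@1 fresh@102 fruit@103 -> a phrase query cannot bridge the values
            }
        }
    }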
@@ -255,11 +255,9 @@ public class HasChildQueryParser implements QueryParser {
            throw new IllegalArgumentException("Search context is required to be set");
        }

        IndexSearcher indexSearcher = searchContext.searcher();
        String joinField = ParentFieldMapper.joinField(parentType);
        IndexReader indexReader = searchContext.searcher().getIndexReader();
        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
        indexSearcher.setQueryCache(null);
        IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
        IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
        MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
        return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
    }
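The HasChildQueryParser hunk replaces the shard's shared searcher with a private IndexSearcher whose query cache is disabled before handing it to JoinUtil. A hedged sketch of that wrapper pattern (Lucene 5.x-era API; obtaining the reader is assumed):

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;

    class UncachedSearcherSketch {
        static IndexSearcher uncachedSearcher(IndexReader reader) {
            IndexSearcher searcher = new IndexSearcher(reader);
            searcher.setQueryCache(null);   // opt this searcher out of the shared query cache
            return searcher;
        }
    }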
@@ -222,8 +222,6 @@ public class QueryStringQueryParser implements QueryParser {
            qpSettings.queryString(org.apache.lucene.queryparser.classic.QueryParser.escape(qpSettings.queryString()));
        }

        qpSettings.queryTypes(parseContext.queryTypes());

        MapperQueryParser queryParser = parseContext.queryParser(qpSettings);

        try {

@@ -19,6 +19,7 @@

package org.elasticsearch.index.query.support;

import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.elasticsearch.common.bytes.BytesReference;

@@ -26,7 +27,6 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;

@@ -54,7 +54,7 @@ public class NestedInnerQueryParseSupport {
    protected boolean filterFound = false;

    protected BitDocIdSetFilter parentFilter;
    protected BitDocIdSetFilter childFilter;
    protected Filter childFilter;

    protected ObjectMapper nestedObjectMapper;
    private ObjectMapper parentObjectMapper;

@@ -191,7 +191,7 @@ public class NestedInnerQueryParseSupport {
        } else {
            parentFilter = parseContext.bitsetFilter(objectMapper.nestedTypeFilter());
        }
        childFilter = parseContext.bitsetFilter(nestedObjectMapper.nestedTypeFilter());
        childFilter = nestedObjectMapper.nestedTypeFilter();
        parentObjectMapper = parseContext.nestedScope().nextLevel(nestedObjectMapper);
    }

@@ -103,11 +103,8 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
            return new BooleanQuery().createWeight(searcher, needsScores);
        }

        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
        indexSearcher.setSimilarity(searcher.getSimilarity(true));
        indexSearcher.setQueryCache(null);
        ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
        indexSearcher.search(childQuery, collector);
        searcher.search(childQuery, collector);

        final long remaining = collector.foundParents();
        if (remaining == 0) {

@@ -152,9 +152,6 @@ public final class ChildrenQuery extends IndexCacheableQuery {
            // No docs of the specified type exist on this shard
            return new BooleanQuery().createWeight(searcher, needsScores);
        }
        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
        indexSearcher.setSimilarity(searcher.getSimilarity(true));
        indexSearcher.setQueryCache(null);

        boolean abort = true;
        long numFoundParents;

@@ -193,7 +190,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
            }
        }

        indexSearcher.search(childQuery, collector);
        searcher.search(childQuery, collector);
        numFoundParents = collector.foundParents();
        if (numFoundParents == 0) {
            return new BooleanQuery().createWeight(searcher, needsScores);

@@ -22,16 +22,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery;

@@ -92,10 +83,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
        }

        ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
        indexSearcher.setSimilarity(searcher.getSimilarity(true));
        indexSearcher.setQueryCache(null);
        indexSearcher.search(parentQuery, collector);
        searcher.search(parentQuery, collector);

        if (collector.parentCount() == 0) {
            return new BooleanQuery().createWeight(searcher, needsScores);

@@ -129,10 +129,7 @@ public class ParentQuery extends IndexCacheableQuery {

        try {
            collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
            IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
            indexSearcher.setSimilarity(searcher.getSimilarity(true));
            indexSearcher.setQueryCache(null);
            indexSearcher.search(parentQuery, collector);
            searcher.search(parentQuery, collector);
            if (collector.parentCount() == 0) {
                return new BooleanQuery().createWeight(searcher, needsScores);
            }

@@ -19,22 +19,14 @@
package org.elasticsearch.percolator;

import com.carrotsearch.hppc.FloatArrayList;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;

@@ -50,9 +50,6 @@ public class RestDeleteAction extends BaseRestHandler {
    @Override
    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
        DeleteRequest deleteRequest = new DeleteRequest(request.param("index"), request.param("type"), request.param("id"));

        deleteRequest.operationThreaded(true);

        deleteRequest.routing(request.param("routing"));
        deleteRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing
        deleteRequest.timeout(request.paramAsTime("timeout", DeleteRequest.DEFAULT_TIMEOUT));

@@ -70,7 +70,6 @@ public class RestIndexAction extends BaseRestHandler {
    @Override
    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
        IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id"));
        indexRequest.operationThreaded(true);
        indexRequest.routing(request.param("routing"));
        indexRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing
        indexRequest.timestamp(request.param("timestamp"));

@@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations;

import com.google.common.collect.ImmutableMap;

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

@@ -130,6 +129,8 @@ public class AggregationPhase implements SearchPhase {
                context.searcher().search(query, globalsCollector);
            } catch (Exception e) {
                throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e);
            } finally {
                context.clearReleasables(SearchContext.Lifetime.COLLECTION);
            }
        }

@@ -145,8 +145,8 @@ public class GeoDistanceParser implements Aggregator.Parser {
                        + currentFieldName + "].", parser.getTokenLocation());
            }
        } else {
            throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "].",
                    parser.getTokenLocation());
            throw new SearchParseException(context, "Unexpected token " + token + " in [" + aggregationName + "]: ["
                    + currentFieldName + "].", parser.getTokenLocation());
        }
    }

@@ -20,7 +20,6 @@ package org.elasticsearch.search.fetch.explain;

import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;

@@ -68,6 +67,8 @@ public class ExplainFetchSubPhase implements FetchSubPhase {
            hitContext.hit().explanation(explanation);
        } catch (IOException e) {
            throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e);
        } finally {
            context.clearReleasables(SearchContext.Lifetime.COLLECTION);
        }
    }
}

@@ -135,7 +135,11 @@ public final class InnerHitsContext {
            } else {
                topDocsCollector = TopScoreDocCollector.create(topN);
            }
            context.searcher().search(q, topDocsCollector);
            try {
                context.searcher().search(q, topDocsCollector);
            } finally {
                clearReleasables(Lifetime.COLLECTION);
            }
            return topDocsCollector.topDocs(from(), size());
        }
    }

@@ -306,7 +310,11 @@ public final class InnerHitsContext {
            } else {
                topDocsCollector = TopScoreDocCollector.create(topN);
            }
            context.searcher().search( q, topDocsCollector);
            try {
                context.searcher().search(q, topDocsCollector);
            } finally {
                clearReleasables(Lifetime.COLLECTION);
            }
            return topDocsCollector.topDocs(from(), size());
        }
    }
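Several hunks in this commit (AggregationPhase, ExplainFetchSubPhase, and InnerHitsContext above) share one fix: move resource release into a finally block around searcher.search(...). The recurring shape, with stand-ins for the ES calls:

    class ReleaseOnEveryPathDemo {
        public static void main(String[] args) {
            try {
                search();                         // may throw, e.g. on query failure or early termination
            } finally {
                releaseCollectionResources();     // previously this only ran on the happy path
            }
        }

        static void search() { /* stand-in for context.searcher().search(query, collector) */ }
        static void releaseCollectionResources() { /* stand-in for clearReleasables(Lifetime.COLLECTION) */ }
    }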
|
|
|
@@ -19,18 +19,14 @@

 package org.elasticsearch.search.internal;

-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.*;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.dfs.AggregatedDfs;
-import org.elasticsearch.search.internal.SearchContext.Lifetime;

 import java.io.IOException;
-import java.util.List;

 /**
  * Context-aware extension of {@link IndexSearcher}.
@@ -42,14 +38,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
      *  AssertingIndexSearcher. */
     private final IndexSearcher in;

-    private final SearchContext searchContext;
-
     private AggregatedDfs aggregatedDfs;

     public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
         super(searcher.reader());
         in = searcher.searcher();
-        this.searchContext = searchContext;
         setSimilarity(searcher.searcher().getSimilarity(true));
         setQueryCache(searchContext.indexShard().indexService().cache().query());
         setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy());
@@ -65,46 +58,23 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {

     @Override
     public Query rewrite(Query original) throws IOException {
-        try {
-            return in.rewrite(original);
-        } catch (Throwable t) {
-            searchContext.clearReleasables(Lifetime.COLLECTION);
-            throw ExceptionsHelper.convertToElastic(t);
-        }
+        return in.rewrite(original);
     }

     @Override
     public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
         // During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher
         // it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher...
-        try {
-            if (aggregatedDfs != null && needsScores) {
-                return super.createNormalizedWeight(query, needsScores);
-            }
-            return in.createNormalizedWeight(query, needsScores);
-        } catch (Throwable t) {
-            searchContext.clearReleasables(Lifetime.COLLECTION);
-            throw ExceptionsHelper.convertToElastic(t);
-        }
+        // if scores are needed and we have dfs data then use it
+        if (aggregatedDfs != null && needsScores) {
+            return super.createNormalizedWeight(query, needsScores);
+        }
+        return in.createNormalizedWeight(query, needsScores);
     }

     @Override
     public Explanation explain(Query query, int doc) throws IOException {
-        try {
-            return in.explain(query, doc);
-        } finally {
-            searchContext.clearReleasables(Lifetime.COLLECTION);
-        }
-    }
-
-    @Override
-    protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-        try {
-            super.search(leaves, weight, collector);
-        } finally {
-            searchContext.clearReleasables(Lifetime.COLLECTION);
-        }
+        return in.explain(query, doc);
     }

     @Override

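// Illustrative sketch (inferred from the hunks above and below, not itself part of the
// commit): with the clearReleasables calls removed from ContextIndexSearcher, every caller
// that actually runs a collector now owns the cleanup of COLLECTION-scoped resources:
//
//     try {
//         context.searcher().search(query, collector);
//     } finally {
//         context.clearReleasables(SearchContext.Lifetime.COLLECTION);
//     }
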
@@ -21,7 +21,6 @@ package org.elasticsearch.search.internal;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Multimap;
 import com.google.common.collect.MultimapBuilder;
-
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
@@ -46,7 +45,6 @@ import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.dfs.DfsSearchResult;

@@ -20,24 +20,8 @@
 package org.elasticsearch.search.query;

 import com.google.common.collect.ImmutableMap;

 import org.apache.lucene.queries.MinDocQuery;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Collector;
-import org.apache.lucene.search.FieldDoc;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.TimeLimitingCollector;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopDocsCollector;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.TotalHitCountCollector;
-import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.*;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.Lucene;
@@ -173,8 +157,8 @@ public class QueryPhase implements SearchPhase {
             // skip to the desired doc and stop collecting after ${size} matches
             if (scrollContext.lastEmittedDoc != null) {
                 BooleanQuery bq = new BooleanQuery();
-                bq.add(query, Occur.MUST);
-                bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), Occur.FILTER);
+                bq.add(query, BooleanClause.Occur.MUST);
+                bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER);
                 query = bq;
             }
             searchContext.terminateAfter(numDocs);

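// Illustrative sketch, not part of the commit: MinDocQuery matches every document whose
// docID is >= a given minimum, so adding it as a FILTER clause lets a scroll request
// resume directly after the last emitted hit instead of re-collecting from doc 0.
// Conceptually (hypothetical and simplified; the real class implements Weight/Scorer):
//
//     DocIdSetIterator all = DocIdSetIterator.all(reader.maxDoc());
//     int doc = all.advance(lastEmittedDoc.doc + 1);    // jump straight past the last hit
//     while (doc != DocIdSetIterator.NO_MORE_DOCS) {
//         collector.collect(doc);                       // terminateAfter(numDocs) caps this
//         doc = all.nextDoc();
//     }
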
@@ -264,13 +248,15 @@ public class QueryPhase implements SearchPhase {
         }

         try {
-            searcher.search(query, collector);
+            searchContext.searcher().search(query, collector);
         } catch (TimeLimitingCollector.TimeExceededException e) {
             assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set";
             searchContext.queryResult().searchTimedOut(true);
         } catch (Lucene.EarlyTerminationException e) {
             assert terminateAfterSet : "EarlyTerminationException thrown even though terminateAfter wasn't set";
             searchContext.queryResult().terminatedEarly(true);
+        } finally {
+            searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION);
         }
         if (terminateAfterSet && searchContext.queryResult().terminatedEarly() == null) {
             searchContext.queryResult().terminatedEarly(false);

@@ -661,7 +661,6 @@ public class ShardReplicationTests extends ESTestCase {
         public AtomicInteger processedOnReplicas = new AtomicInteger();
-
         Request() {
             this.operationThreaded(randomBoolean());
         }

         Request(ShardId shardId) {

@@ -21,6 +21,7 @@ package org.elasticsearch.bwcompat;

 import com.google.common.base.Predicate;
 import com.google.common.util.concurrent.ListenableFuture;

 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -40,6 +41,7 @@ import org.elasticsearch.common.util.MultiDataPathUpgrader;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.engine.EngineConfig;
+import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.shard.MergePolicyConfig;
 import org.elasticsearch.indices.recovery.RecoverySettings;
@@ -330,6 +332,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
         assertNewReplicasWork(indexName);
         assertUpgradeWorks(indexName, isLatestLuceneVersion(version));
         assertDeleteByQueryWorked(indexName, version);
+        assertPositionIncrementGapDefaults(indexName, version);
         unloadIndex(indexName);
     }

@@ -442,6 +445,14 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase {
         assertEquals(0, searchReq.get().getHits().getTotalHits());
     }

+    void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception {
+        if (version.before(Version.V_2_0_0_beta1)) {
+            StringFieldMapperPositionIncrementGapTests.assertGapIsZero(client(), indexName, "doc");
+        } else {
+            StringFieldMapperPositionIncrementGapTests.assertGapIsOneHundred(client(), indexName, "doc");
+        }
+    }
+
     void assertUpgradeWorks(String indexName, boolean alreadyLatest) throws Exception {
         if (alreadyLatest == false) {
             UpgradeIT.assertNotUpgraded(client(), indexName);

@@ -19,19 +19,19 @@

 package org.elasticsearch.common.settings.loader;

 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsException;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;

 /**
  *
  */
 public class JsonSettingsLoaderTests extends ESTestCase {

     @Test
     public void testSimpleJsonSettings() throws Exception {
         String json = "/org/elasticsearch/common/settings/loader/test-settings.json";
@@ -50,4 +50,17 @@ public class JsonSettingsLoaderTests extends ESTestCase {
         assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
         assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
     }
+
+    public void testDuplicateKeysThrowsException() {
+        String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
+        try {
+            settingsBuilder()
+                    .loadFromSource(json)
+                    .build();
+            fail("expected exception");
+        } catch (SettingsException e) {
+            assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
+            assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [13], previous value [bar], current value [baz]"));
+        }
+    }
 }

@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings.loader;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+
+public class PropertiesSettingsLoaderTests extends ESTestCase {
+    public void testDuplicateKeyFromStringThrowsException() throws IOException {
+        PropertiesSettingsLoader loader = new PropertiesSettingsLoader();
+        try {
+            loader.load("foo=bar\nfoo=baz");
+            fail("expected exception");
+        } catch (ElasticsearchParseException e) {
+            assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]");
+        }
+    }
+
+    public void testDuplicateKeysFromBytesThrowsException() throws IOException {
+        PropertiesSettingsLoader loader = new PropertiesSettingsLoader();
+        try {
+            loader.load("foo=bar\nfoo=baz".getBytes(Charset.defaultCharset()));
+            fail("expected exception");
+        } catch (ElasticsearchParseException e) {
+            assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]");
+        }
+    }
+}

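// A sketch of one way the duplicate detection exercised above can be implemented (an
// assumption for illustration, not code taken from this commit): parse into a Properties
// subclass whose put() rejects keys it has already seen.

import org.elasticsearch.ElasticsearchParseException;

import java.util.Properties;

class NoDuplicatesProperties extends Properties {
    @Override
    public synchronized Object put(Object key, Object value) {
        Object previous = super.put(key, value);
        if (previous != null) {
            // Same message shape as the one asserted in the tests above.
            throw new ElasticsearchParseException("duplicate settings key [" + key
                    + "] found, previous value [" + previous + "], current value [" + value + "]");
        }
        return previous;
    }
}
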
@@ -19,6 +19,7 @@

 package org.elasticsearch.common.settings.loader;

+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsException;
 import org.elasticsearch.test.ESTestCase;
@@ -31,7 +32,6 @@ import static org.hamcrest.Matchers.equalTo;
  *
  */
 public class YamlSettingsLoaderTests extends ESTestCase {
-
     @Test
     public void testSimpleYamlSettings() throws Exception {
         String yaml = "/org/elasticsearch/common/settings/loader/test-settings.yml";
@@ -66,4 +66,17 @@ public class YamlSettingsLoaderTests extends ESTestCase {
                 .loadFromStream(yaml, getClass().getResourceAsStream(yaml))
                 .build();
     }
+
+    public void testDuplicateKeysThrowsException() {
+        String yaml = "foo: bar\nfoo: baz";
+        try {
+            settingsBuilder()
+                    .loadFromSource(yaml)
+                    .build();
+            fail("expected exception");
+        } catch (SettingsException e) {
+            assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
+            assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [2], column number [6], previous value [bar], current value [baz]"));
+        }
+    }
 }

@@ -42,8 +42,8 @@ import org.elasticsearch.index.analysis.filter1.MyFilterTokenFilterFactory;
 import org.elasticsearch.index.settings.IndexSettingsModule;
 import org.elasticsearch.indices.analysis.IndicesAnalysisService;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
 import org.hamcrest.MatcherAssert;
-import org.junit.Test;

 import java.io.BufferedWriter;
 import java.io.IOException;
@@ -87,26 +87,22 @@ public class AnalysisModuleTests extends ESTestCase {

     }

-    @Test
     public void testSimpleConfigurationJson() {
         Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.json");
         testSimpleConfiguration(settings);
     }

-    @Test
     public void testSimpleConfigurationYaml() {
         Settings settings = loadFromClasspath("/org/elasticsearch/index/analysis/test1.yml");
         testSimpleConfiguration(settings);
     }

-    @Test
     public void testDefaultFactoryTokenFilters() throws IOException {
         assertTokenFilter("keyword_repeat", KeywordRepeatFilter.class);
         assertTokenFilter("persian_normalization", PersianNormalizationFilter.class);
         assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class);
     }

-    @Test
     public void testVersionedAnalyzers() throws Exception {
         String yaml = "/org/elasticsearch/index/analysis/test1.yml";
         Settings settings2 = settingsBuilder()
@@ -164,7 +160,7 @@ public class AnalysisModuleTests extends ESTestCase {
 //        html = (HtmlStripCharFilterFactory) custom2.charFilters()[1];
 //        assertThat(html.readAheadLimit(), equalTo(1024));

-        // verify position offset gap
+        // verify position increment gap
         analyzer = analysisService.analyzer("custom6").analyzer();
         assertThat(analyzer, instanceOf(CustomAnalyzer.class));
         CustomAnalyzer custom6 = (CustomAnalyzer) analyzer;
@@ -215,7 +211,6 @@ public class AnalysisModuleTests extends ESTestCase {
 //        MatcherAssert.assertThat(wordList, hasItems("donau", "dampf", "schiff", "spargel", "creme", "suppe"));
     }

-    @Test
     public void testWordListPath() throws Exception {
         Settings settings = Settings.builder()
                 .put("path.home", createTempDir().toString())
@@ -243,7 +238,6 @@ public class AnalysisModuleTests extends ESTestCase {
         return wordListFile;
     }

-    @Test
     public void testUnderscoreInAnalyzerName() {
         Settings settings = Settings.builder()
                 .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword")
@@ -259,7 +253,6 @@ public class AnalysisModuleTests extends ESTestCase {
         }
     }

-    @Test
     public void testUnderscoreInAnalyzerNameAlias() {
         Settings settings = Settings.builder()
                 .put("index.analysis.analyzer.valid_name.tokenizer", "keyword")
@@ -275,4 +268,61 @@ public class AnalysisModuleTests extends ESTestCase {
             assertThat(e.getCause().getMessage(), equalTo("analyzer name must not start with '_'. got \"_invalid_name\""));
         }
     }
+
+    public void testBackwardCompatible() {
+        Settings settings = settingsBuilder()
+                .put("index.analysis.analyzer.custom1.tokenizer", "standard")
+                .put("index.analysis.analyzer.custom1.position_offset_gap", "128")
+                .put("index.analysis.analyzer.custom2.tokenizer", "standard")
+                .put("index.analysis.analyzer.custom2.position_increment_gap", "256")
+                .put("path.home", createTempDir().toString())
+                .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
+                        Version.V_1_7_1))
+                .build();
+        AnalysisService analysisService = getAnalysisService(settings);
+
+        Analyzer custom1 = analysisService.analyzer("custom1").analyzer();
+        assertThat(custom1, instanceOf(CustomAnalyzer.class));
+        assertThat(custom1.getPositionIncrementGap("custom1"), equalTo(128));
+
+        Analyzer custom2 = analysisService.analyzer("custom2").analyzer();
+        assertThat(custom2, instanceOf(CustomAnalyzer.class));
+        assertThat(custom2.getPositionIncrementGap("custom2"), equalTo(256));
+    }
+
+    public void testWithBothSettings() {
+        Settings settings = settingsBuilder()
+                .put("index.analysis.analyzer.custom.tokenizer", "standard")
+                .put("index.analysis.analyzer.custom.position_offset_gap", "128")
+                .put("index.analysis.analyzer.custom.position_increment_gap", "256")
+                .put("path.home", createTempDir().toString())
+                .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
+                        Version.V_1_7_1))
+                .build();
+        try {
+            getAnalysisService(settings);
+            fail("Analyzer that defines both position_offset_gap and position_increment_gap should fail");
+        } catch (ProvisionException e) {
+            assertTrue(e.getCause() instanceof IllegalArgumentException);
+            assertThat(e.getCause().getMessage(), equalTo("Custom Analyzer [custom] defined both [position_offset_gap] and [position_increment_gap]" +
+                    ", use only [position_increment_gap]"));
+        }
+    }
+
+    public void testDeprecatedPositionOffsetGap() {
+        Settings settings = settingsBuilder()
+                .put("index.analysis.analyzer.custom.tokenizer", "standard")
+                .put("index.analysis.analyzer.custom.position_offset_gap", "128")
+                .put("path.home", createTempDir().toString())
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+                .build();
+        try {
+            getAnalysisService(settings);
+            fail("Analyzer should fail if it has position_offset_gap");
+        } catch (ProvisionException e) {
+            assertTrue(e.getCause() instanceof IllegalArgumentException);
+            assertThat(e.getCause().getMessage(), equalTo("Option [position_offset_gap] in Custom Analyzer [custom] " +
+                    "has been renamed, please use [position_increment_gap] instead."));
+        }
+    }
 }

@@ -68,7 +68,7 @@
         },
         "custom6":{
             "tokenizer":"standard",
-            "position_offset_gap": 256
+            "position_increment_gap": 256
         },
         "czechAnalyzerWithStemmer":{
             "tokenizer":"standard",

@@ -50,7 +50,7 @@ index :
         char_filter : [my_mapping]
       custom6 :
         tokenizer : standard
-        position_offset_gap: 256
+        position_increment_gap: 256
       custom7 :
         type : standard
         version: 3.6

@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.common.xcontent.json.JsonXContentParser;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+
+// TODO: make this a real unit test
+public class DocumentParserTests extends ESSingleNodeTestCase {
+
+    public void testTypeDisabled() throws Exception {
+        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .field("enabled", false).endObject().endObject().string();
+        DocumentMapper mapper = mapperParser.parse(mapping);
+
+        BytesReference bytes = XContentFactory.jsonBuilder()
+                .startObject()
+                .field("field", "1234")
+                .endObject().bytes();
+        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        assertNull(doc.rootDoc().getField("field"));
+    }
+
+    public void testFieldDisabled() throws Exception {
+        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+                .startObject("foo").field("enabled", false).endObject()
+                .startObject("bar").field("type", "integer").endObject()
+                .endObject().endObject().endObject().string();
+        DocumentMapper mapper = mapperParser.parse(mapping);
+
+        BytesReference bytes = XContentFactory.jsonBuilder()
+                .startObject()
+                .field("foo", "1234")
+                .field("bar", 10)
+                .endObject().bytes();
+        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        assertNull(doc.rootDoc().getField("foo"));
+        assertNotNull(doc.rootDoc().getField("bar"));
+    }
+}

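// For reference (an illustrative rendering, assuming default XContent serialization; not
// output taken from the commit): the mapping built in testFieldDisabled corresponds to
//
//     {"type":{"properties":{"foo":{"enabled":false},"bar":{"type":"integer"}}}}
//
// A property with "enabled": false is accepted at parse time but produces no indexed
// fields, which is why "foo" is absent from the Lucene document while "bar" is present.
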
@@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.ContentPath;
@@ -43,7 +44,12 @@ import org.elasticsearch.index.mapper.MergeResult;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.Version;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.VersionUtils;
+

 import org.junit.Before;
 import org.junit.Test;
+
@@ -54,6 +60,7 @@ import static org.elasticsearch.index.mapper.core.StringFieldMapper.Builder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.containsString;

 /**
  */
@@ -222,22 +229,22 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties")
                 .startObject("field1")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .endObject()
                 .startObject("field2")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .field("analyzer", "standard")
                 .endObject()
                 .startObject("field3")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .field("analyzer", "standard")
                 .field("search_analyzer", "simple")
                 .endObject()
                 .startObject("field4")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .field("analyzer", "standard")
                 .field("search_analyzer", "simple")
                 .field("search_quote_analyzer", "simple")
@@ -256,12 +263,12 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
                 .startObject("properties")
                 .startObject("field1")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .field("search_quote_analyzer", "simple")
                 .endObject()
                 .startObject("field2")
                 .field("type", "string")
-                .field("position_offset_gap", 1000)
+                .field("position_increment_gap", 1000)
                 .field("analyzer", "standard")
                 .field("search_analyzer", "standard")
                 .field("search_quote_analyzer", "simple")
@@ -518,4 +525,48 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase {
         assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));
     }
+
+    /**
+     * Test that expected exceptions are thrown when creating a new index with position_offset_gap
+     */
+    public void testPositionOffsetGapDeprecation() throws Exception {
+        // test deprecation exceptions on newly created indexes
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                .startObject("field1")
+                .field("type", "string")
+                .field("position_increment_gap", 10)
+                .endObject()
+                .startObject("field2")
+                .field("type", "string")
+                .field("position_offset_gap", 50)
+                .field("analyzer", "standard")
+                .endObject().endObject().endObject().endObject().string();
+        try {
+            parser.parse(mapping);
+            fail("Mapping definition should fail with the position_offset_gap setting");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]");
+        }
+    }
+
+    /**
+     * Test backward compatibility
+     */
+    public void testBackwardCompatible() throws Exception {
+
+        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
+                Version.V_1_7_1)).build();
+
+        DocumentMapperParser parser = createIndex("backward_compatible_index", settings).mapperService().documentMapperParser();
+
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties")
+                .startObject("field1")
+                .field("type", "string")
+                .field("position_offset_gap", 10)
+                .endObject().endObject().endObject().endObject().string();
+        parser.parse(mapping);
+
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"position_increment_gap\":10"));
+    }
 }

@@ -0,0 +1,158 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.string;
+
+import com.google.common.collect.ImmutableList;
+
+import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.mapper.MapperParsingException;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+
+import java.io.IOException;
+
+import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.hamcrest.Matchers.containsString;
+
+/**
+ * Tests that position_increment_gap is read from the mapper and applies as
+ * expected in queries.
+ */
+public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase {
+    /**
+     * The default position_increment_gap should be large enough that most
+     * "sensible" phrase query slops won't match across values.
+     */
+    public void testDefault() throws IOException {
+        assertGapIsOneHundred(client(), "test", "test");
+    }
+
+    /**
+     * Asserts that the post-2.0 default is being applied.
+     */
+    public static void assertGapIsOneHundred(Client client, String indexName, String type) throws IOException {
+        testGap(client, indexName, type, 100);
+
+        // No match across gap using default slop with default positionIncrementGap
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0);
+
+        // Nor with small-ish values
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0);
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0);
+
+        // But huge-ish values still match
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1);
+    }
+
+    public void testZero() throws IOException {
+        setupGapInMapping(0);
+        assertGapIsZero(client(), "test", "test");
+    }
+
+    /**
+     * Asserts that the pre-2.0 default has been applied or explicitly
+     * configured.
+     */
+    public static void assertGapIsZero(Client client, String indexName, String type) throws IOException {
+        testGap(client, indexName, type, 0);
+        /*
+         * Phrases match across different values using default slop with pre-2.0 default
+         * position_increment_gap.
+         */
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 1);
+    }
+
+    public void testLargerThanDefault() throws IOException {
+        setupGapInMapping(10000);
+        testGap(client(), "test", "test", 10000);
+    }
+
+    public void testSmallerThanDefault() throws IOException {
+        setupGapInMapping(2);
+        testGap(client(), "test", "test", 2);
+    }
+
+    public void testNegativeIsError() throws IOException {
+        try {
+            setupGapInMapping(-1);
+            fail("Expected an error");
+        } catch (MapperParsingException e) {
+            assertThat(ExceptionsHelper.detailedMessage(e), containsString("positions_increment_gap less than 0 aren't allowed"));
+        }
+    }
+
+    /**
+     * Tests that the default actually defaults to the position_increment_gap
+     * configured in the analyzer. This behavior is very old and a little
+     * strange, but we decided it was not worth breaking.
+     */
+    public void testDefaultDefaultsToAnalyzer() throws IOException {
+        XContentBuilder settings = XContentFactory.jsonBuilder().startObject().startObject("analysis").startObject("analyzer")
+                .startObject("gappy");
+        settings.field("type", "custom");
+        settings.field("tokenizer", "standard");
+        settings.field("position_increment_gap", 2);
+        setupAnalyzer(settings, "gappy");
+        testGap(client(), "test", "test", 2);
+    }
+
+    /**
+     * Build an index named "test" with a field named "string" with the provided
+     * positionIncrementGap that uses the standard analyzer.
+     */
+    private void setupGapInMapping(int positionIncrementGap) throws IOException {
+        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string");
+        mapping.field("type", "string");
+        mapping.field("position_increment_gap", positionIncrementGap);
+        client().admin().indices().prepareCreate("test").addMapping("test", mapping).get();
+    }
+
+    /**
+     * Build an index named "test" with the provided settings and a field
+     * named "string" that uses the specified analyzer and default
+     * position_increment_gap.
+     */
+    private void setupAnalyzer(XContentBuilder settings, String analyzer) throws IOException {
+        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string");
+        mapping.field("type", "string");
+        mapping.field("analyzer", analyzer);
+        client().admin().indices().prepareCreate("test").addMapping("test", mapping).setSettings(settings).get();
+    }
+
+    private static void testGap(Client client, String indexName, String type, int positionIncrementGap) throws IOException {
+        client.prepareIndex(indexName, type, "position_gap_test").setSource("string", ImmutableList.of("one", "two three")).setRefresh(true).get();
+
+        // Baseline - phrase query finds matches in the same field value
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1);
+
+        if (positionIncrementGap > 0) {
+            // No match across gaps when slop < position gap
+            assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap - 1)).get(),
+                    0);
+        }
+
+        // Match across gaps when slop >= position gap
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap)).get(), 1);
+        assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap + 1)).get(), 1);
+    }
+}

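// Worked example of the gap arithmetic these tests rely on (illustrative, assuming the
// standard analyzer): with position_increment_gap = 100, indexing ["one", "two three"]
// assigns token positions one=0, two=101, three=102; the gap is applied once between values:
//
//     int positionOfTwo = positionOfOne + 1 + positionIncrementGap;   // 0 + 1 + 100 = 101
//
// A match_phrase query for "one two" therefore needs slop >= 100 to match across the value
// boundary, which is exactly what testGap asserts.
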
@@ -2452,7 +2452,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         Query parsedQuery = queryParser.parse(query).query();
         assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class));
         assertThat(((ConstantScoreQuery) parsedQuery).getQuery(), instanceOf(ToParentBlockJoinQuery.class));
-        assertThat(((ConstantScoreQuery) parsedQuery).getQuery().toString(), equalTo("ToParentBlockJoinQuery (+*:* #random_access(QueryWrapperFilter(_type:__nested)))"));
+        assertThat(((ConstantScoreQuery) parsedQuery).getQuery().toString(), equalTo("ToParentBlockJoinQuery (+*:* #QueryWrapperFilter(_type:__nested))"));
         SearchContext.removeCurrent();
     }

@@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child;

 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
@@ -53,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.TestSearchContext;
 import org.junit.AfterClass;
@@ -65,11 +50,7 @@ import java.util.NavigableSet;
 import java.util.Random;
 import java.util.TreeSet;

-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.notQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.index.query.QueryBuilders.*;
 import static org.hamcrest.Matchers.equalTo;

 public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
@@ -119,9 +100,9 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {

         IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
         IndexSearcher searcher = new IndexSearcher(indexReader);
-        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(
-                SearchContext.current(), new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
-        ));
+        ((TestSearchContext) SearchContext.current()).setSearcher(
+                new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
+        );

         TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
         BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
@@ -214,7 +195,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
         Engine.Searcher engineSearcher = new Engine.Searcher(
                 ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
         );
-        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+        ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);

         int max = numUniqueChildValues / 4;
         for (int i = 0; i < max; i++) {
@@ -243,7 +224,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
                 engineSearcher = new Engine.Searcher(
                         ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
                 );
-                ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+                ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
             }

             String childValue = childValues[random().nextInt(numUniqueChildValues)];

@@ -22,34 +22,13 @@ import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LeafCollector;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
@@ -65,7 +44,6 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.TestSearchContext;
 import org.junit.AfterClass;
@@ -73,19 +51,9 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 import java.io.IOException;
-import java.util.Locale;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Random;
-import java.util.TreeMap;
+import java.util.*;

-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
-import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
-import static org.elasticsearch.index.query.QueryBuilders.notQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
-import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
+import static org.elasticsearch.index.query.QueryBuilders.*;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;

@@ -195,7 +163,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
         Engine.Searcher engineSearcher = new Engine.Searcher(
                 ChildrenQueryTests.class.getSimpleName(), searcher
         );
-        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+        ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);

         int max = numUniqueChildValues / 4;
         for (int i = 0; i < max; i++) {
@@ -224,7 +192,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
                 engineSearcher = new Engine.Searcher(
                         ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
                 );
-                ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+                ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
             }

             String childValue = childValues[random().nextInt(numUniqueChildValues)];
@@ -385,7 +353,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {

         // setup to read the parent/child map
         Engine.Searcher engineSearcher = new Engine.Searcher(ChildrenQueryTests.class.getSimpleName(), searcher);
-        ((TestSearchContext)context).setSearcher(new ContextIndexSearcher(context, engineSearcher));
+        ((TestSearchContext)context).setSearcher(engineSearcher);

         // child query that returns the score as the value of "childScore" for each child document, with the parent's score determined by the score type
         QueryBuilder childQueryBuilder = functionScoreQuery(typeQuery("child")).add(new FieldValueFactorFunctionBuilder(CHILD_SCORE_NAME));

@@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child;

 import com.carrotsearch.hppc.IntIntHashMap;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
@@ -52,7 +38,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.TestSearchContext;
 import org.junit.AfterClass;
@@ -64,11 +49,7 @@ import java.util.NavigableSet;
 import java.util.Random;
 import java.util.TreeSet;

-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
-import static org.elasticsearch.index.query.QueryBuilders.notQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.index.query.QueryBuilders.*;

 /**
  */
@@ -173,7 +154,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
         Engine.Searcher engineSearcher = new Engine.Searcher(
                 ParentConstantScoreQuery.class.getSimpleName(), searcher
         );
-        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+        ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);

         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {
@@ -200,7 +181,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
                 engineSearcher = new Engine.Searcher(
                         ParentConstantScoreQueryTests.class.getSimpleName(), searcher
                 );
-                ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+                ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
             }

             String parentValue = parentValues[random().nextInt(numUniqueParentValues)];

@@ -21,30 +21,12 @@ package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntIntHashMap;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.PostingsEnum;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LeafCollector;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
@@ -57,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.TestSearchContext;
 import org.junit.AfterClass;
@@ -70,11 +51,7 @@ import java.util.NavigableMap;
 import java.util.Random;
 import java.util.TreeMap;

-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
-import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
-import static org.elasticsearch.index.query.QueryBuilders.notQuery;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.index.query.QueryBuilders.*;

 public class ParentQueryTests extends AbstractChildTestCase {

@@ -175,7 +152,7 @@ public class ParentQueryTests extends AbstractChildTestCase {
         Engine.Searcher engineSearcher = new Engine.Searcher(
                 ParentQueryTests.class.getSimpleName(), searcher
         );
-        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+        ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);

         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {
@@ -202,7 +179,7 @@ public class ParentQueryTests extends AbstractChildTestCase {
                 engineSearcher = new Engine.Searcher(
                         ParentConstantScoreQueryTests.class.getSimpleName(), searcher
                 );
-                ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+                ((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
             }

             String parentValue = parentValues[random().nextInt(numUniqueParentValues)];

@@ -253,7 +253,7 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         ensureGreen();

         client().admin().indices().preparePutMapping("test")
-                .setType("document").setSource("simple", "type=string,analyzer=simple,position_offset_gap=100").get();
+                .setType("document").setSource("simple", "type=string,analyzer=simple,position_increment_gap=100").get();

         String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"};

@@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;

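// assertHitCount, used throughout the hunks below, is the ElasticsearchAssertions helper
// that replaces the repeated getTotalHits() checks. Conceptually (a sketch, not its exact
// body):
//
//     public static void assertHitCount(SearchResponse resp, long expectedHitCount) {
//         if (resp.getHits().getTotalHits() != expectedHitCount) {
//             throw new AssertionError("expected [" + expectedHitCount + "] hits but got ["
//                     + resp.getHits().getTotalHits() + "]");
//         }
//     }
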
@@ -87,7 +88,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("value"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -102,7 +103,7 @@ public class AvgIT extends AbstractNumericTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(global("global").subAggregation(avg("avg").field("value"))).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Global global = searchResponse.getAggregations().get("global");
         assertThat(global, notNullValue());
@@ -128,7 +129,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("value"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -144,7 +145,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("value").script(new Script("_value + 1")))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -162,7 +163,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -174,7 +175,7 @@ public class AvgIT extends AbstractNumericTestCase {
         SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                 .addAggregation(avg("avg").format("#").field("value")).execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -191,7 +192,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("values"))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -207,7 +208,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("values").script(new Script("_value + 1")))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -225,7 +226,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -241,7 +242,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").script(new Script("doc['value'].value")))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -259,7 +260,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -277,7 +278,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -293,7 +294,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -309,7 +310,7 @@ public class AvgIT extends AbstractNumericTestCase {
                 .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                 .execute().actionGet();

-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
+        assertHitCount(searchResponse, 10);

         Avg avg = searchResponse.getAggregations().get("avg");
         assertThat(avg, notNullValue());
@@ -328,7 +329,7 @@ public class AvgIT extends AbstractNumericTestCase {
                         avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
|
|
|
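Every hunk in these aggregation test classes makes the same substitution: the raw `assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l))` check becomes a call to the `assertHitCount` helper that the new static import brings in from `ElasticsearchAssertions`. As a rough sketch of what such a helper boils down to — the body below is an assumption for orientation, not the framework's actual source:

```java
import org.elasticsearch.action.search.SearchResponse;

// Sketch only: what an assertHitCount-style helper checks. The real
// ElasticsearchAssertions implementation may differ in message and detail.
final class HitCountAssertions {
    static void assertHitCount(SearchResponse response, long expected) {
        long totalHits = response.getHits().getTotalHits();
        if (totalHits != expected) {
            // One descriptive failure message instead of a bare equalTo(...) mismatch.
            throw new AssertionError("expected " + expected + " hits but got " + totalHits);
        }
    }
}
```

Centralizing the check this way keeps each test to a single intention-revealing line and gives every hit-count failure the same diagnostic output.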
@@ -34,6 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -121,7 +122,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("value").sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -147,7 +148,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("value"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -168,7 +169,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -199,7 +200,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -249,7 +250,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("value").sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -274,7 +275,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("value").script(new Script("_value + 1")).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -303,7 +304,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -328,7 +329,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("values").sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -353,7 +354,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").field("values").script(new Script("_value - 1")).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -382,7 +383,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -407,7 +408,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").script(new Script("doc['value'].value")).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -435,7 +436,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -463,7 +464,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -488,7 +489,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.addAggregation(extendedStats("stats").script(new Script("doc['values'].values")).sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -514,7 +515,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.execute().actionGet();

assertShardExecutionState(searchResponse, 0);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -545,7 +546,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
.sigma(sigma))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());

@@ -41,6 +41,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
@@ -166,7 +167,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("value").percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -185,7 +186,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("value").percentiles(pcts))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -212,7 +213,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("value").percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -230,7 +231,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("value").percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -248,7 +249,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("value").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -269,7 +270,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -287,7 +288,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("values").percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -305,7 +306,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("values").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -322,7 +323,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.field("values").script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits);
@@ -343,7 +344,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -361,7 +362,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -382,7 +383,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -403,7 +404,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -421,7 +422,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -439,7 +440,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -463,7 +464,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
"List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;",
ScriptType.INLINE, null, params)).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -483,7 +484,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.numberOfSignificantValueDigits(sigDigits).percentiles(99))
.order(Order.aggregation("percentile_ranks", "99", asc))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Histogram histo = searchResponse.getAggregations().get("histo");
double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
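Each of the percentile-ranks hunks above ends in `assertConsistent(pcts, percentiles, min, max, sigDigits)`. The diff never shows that helper's body; a plausible sketch of the kind of bounds check such a method could perform — assumed for orientation only, not the test class's actual implementation:

```java
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;

// Assumed sketch: the sort of consistency check an assertConsistent helper
// could run over HDR percentile ranks (run with -ea so asserts fire).
final class RankConsistency {
    static void assertConsistent(double[] pcts, PercentileRanks ranks, double minValue, double maxValue) {
        for (double value : pcts) {
            double rank = ranks.percent(value);
            // A rank is a percentage, so it must land in [0, 100] ...
            assert rank >= 0 && rank <= 100 : "rank out of range: " + rank;
            // ... and values outside the data's [min, max] pin to the extremes.
            if (value < minValue) assert rank == 0;
            if (value > maxValue) assert rank == 100;
        }
    }
}
```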
@@ -41,6 +41,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -167,7 +168,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -187,7 +188,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.field("value")
.percentiles(pcts))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -216,7 +217,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -234,7 +235,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value")
.script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -255,7 +256,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute()
.actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -274,7 +275,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -292,7 +293,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values")
.script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -309,7 +310,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values")
.script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits);
@@ -330,7 +331,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute()
.actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -348,7 +349,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
.script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits);
@@ -369,7 +370,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -390,7 +391,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
@@ -408,7 +409,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -426,7 +427,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
@@ -450,7 +451,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
"List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;",
ScriptType.INLINE, null, params)).percentiles(pcts)).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits);
@@ -470,7 +471,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.percentiles(99))
.order(Order.aggregation("percentiles", "99", asc))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Histogram histo = searchResponse.getAggregations().get("histo");
double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;

@@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@@ -86,7 +87,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("value"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -99,7 +100,7 @@ public class MaxIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(max("max").format("0000.0").field("value")).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -115,7 +116,7 @@ public class MaxIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(global("global").subAggregation(max("max").field("value"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -142,7 +143,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("value"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -158,7 +159,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("value").script(new Script("_value + 1")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -176,7 +177,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -192,7 +193,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("values"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -208,7 +209,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("values").script(new Script("_value + 1")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -226,7 +227,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -242,7 +243,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").script(new Script("doc['value'].value")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -260,7 +261,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -278,7 +279,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -294,7 +295,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").script(new Script("doc['values'].values")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -310,7 +311,7 @@ public class MaxIT extends AbstractNumericTestCase {
.addAggregation(max("max").script(new Script("doc['values'].values")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
@@ -328,7 +329,7 @@ public class MaxIT extends AbstractNumericTestCase {
max("max").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());

@@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
@@ -86,7 +87,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("value"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -99,7 +100,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").format("0000.0").field("value")).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -115,7 +116,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(global("global").subAggregation(min("min").field("value"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -142,7 +143,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("value"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -158,7 +159,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("value").script(new Script("_value - 1")))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -176,7 +177,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -192,7 +193,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("values"))
.execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -207,7 +208,7 @@ public class MinIT extends AbstractNumericTestCase {
.setQuery(matchAllQuery())
.addAggregation(min("min").field("values").script(new Script("_value - 1"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -222,7 +223,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").field("values").script(new Script("_value * -1"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -239,7 +240,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params))).execute()
.actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -253,7 +254,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").script(new Script("doc['value'].value"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -270,7 +271,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute()
.actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -287,7 +288,7 @@ public class MinIT extends AbstractNumericTestCase {
.addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute()
.actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -301,7 +302,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -315,7 +316,7 @@ public class MinIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
@@ -337,7 +338,7 @@ public class MinIT extends AbstractNumericTestCase {
"List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;",
ScriptType.INLINE, null, params))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());

@@ -34,6 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -103,7 +104,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -120,7 +121,7 @@ public class StatsIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(stats("stats").format("0000.0").field("value")).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -144,7 +145,7 @@ public class StatsIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(global("global").subAggregation(stats("stats").field("value"))).execute().actionGet();

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Global global = searchResponse.getAggregations().get("global");
assertThat(global, notNullValue());
@@ -186,7 +187,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -208,7 +209,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -232,7 +233,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -254,7 +255,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -276,7 +277,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -300,7 +301,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -322,7 +323,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -346,7 +347,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -370,7 +371,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -392,7 +393,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -414,7 +415,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
@@ -440,7 +441,7 @@ public class StatsIT extends AbstractNumericTestCase {

assertShardExecutionState(searchResponse, 0);

assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
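Unlike the other classes, the StatsIT hunks (and one ExtendedStatsIT hunk) call `assertShardExecutionState(searchResponse, 0)` before the hit-count check. The diff does not include that helper's body; presumably it verifies that the search executed on all shards with the expected number of failures, along these lines (assumed sketch, not the class's actual source):

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;

// Assumed sketch of an assertShardExecutionState-style helper: confirm the
// search completed with exactly the expected number of shard failures (0).
final class ShardStateAssertions {
    static void assertShardExecutionState(SearchResponse response, int expectedFailures) {
        ShardSearchFailure[] failures = response.getShardFailures();
        if (failures.length != expectedFailures) {
            throw new AssertionError("expected " + expectedFailures
                    + " shard failures, got " + failures.length);
        }
    }
}
```

Checking shard state first means a partial failure surfaces as its own diagnostic instead of masquerading as a wrong hit count.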
@@ -33,6 +33,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

@@ -86,7 +87,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("value"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -99,7 +100,7 @@ public class SumIT extends AbstractNumericTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(sum("sum").format("0000.0").field("value")).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -115,7 +116,7 @@ public class SumIT extends AbstractNumericTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(global("global").subAggregation(sum("sum").field("value"))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Global global = searchResponse.getAggregations().get("global");
        assertThat(global, notNullValue());

@@ -141,7 +142,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("value"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -157,7 +158,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("value").script(new Script("_value + 1")))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -175,7 +176,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("value").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -191,7 +192,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").script(new Script("doc['value'].value")))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -209,7 +210,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -227,7 +228,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -244,7 +245,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -260,7 +261,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -279,7 +280,7 @@ public class SumIT extends AbstractNumericTestCase {
                        sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -296,7 +297,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("values"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -312,7 +313,7 @@ public class SumIT extends AbstractNumericTestCase {
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("values").script(new Script("_value + 1"))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -329,7 +330,7 @@ public class SumIT extends AbstractNumericTestCase {
                .addAggregation(sum("sum").field("values").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());

@@ -41,6 +41,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;

@@ -159,7 +160,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -177,7 +178,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        randomCompression(percentileRanks("percentile_ranks")).field("value").percentiles(pcts))).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Global global = searchResponse.getAggregations().get("global");
        assertThat(global, notNullValue());

@@ -203,7 +204,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -220,7 +221,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -237,7 +238,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -257,7 +258,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -274,7 +275,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -291,7 +292,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -307,7 +308,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, -maxValues, -minValues);

@@ -327,7 +328,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -344,7 +345,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -364,7 +365,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -384,7 +385,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -401,7 +402,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -418,7 +419,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -439,7 +440,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -456,7 +457,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
                        .order(Order.aggregation("percentile_ranks", "99", asc)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Histogram histo = searchResponse.getAggregations().get("histo");
        double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;

@@ -41,6 +41,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;

@@ -159,7 +160,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -176,7 +177,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Global global = searchResponse.getAggregations().get("global");
        assertThat(global, notNullValue());

@@ -203,7 +204,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -220,7 +221,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -240,7 +241,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -257,7 +258,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -274,7 +275,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -290,7 +291,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, -maxValues, -minValues);

@@ -310,7 +311,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -327,7 +328,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue, maxValue);

@@ -347,7 +348,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -367,7 +368,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);

@@ -384,7 +385,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -401,7 +402,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues, maxValues);

@@ -422,7 +423,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .percentiles(pcts))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
        assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1);

@@ -439,7 +440,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
                        .order(Order.aggregation("percentiles", "99", asc)))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Histogram histo = searchResponse.getAggregations().get("histo");
        double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;

@@ -33,6 +33,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

@@ -82,7 +83,7 @@ public class ValueCountIT extends ESIntegTestCase {
                .addAggregation(count("count").field("value"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -96,7 +97,7 @@ public class ValueCountIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(global("global").subAggregation(count("count").field("value"))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        Global global = searchResponse.getAggregations().get("global");
        assertThat(global, notNullValue());

@@ -121,7 +122,7 @@ public class ValueCountIT extends ESIntegTestCase {
                .addAggregation(count("count").field("value"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -137,7 +138,7 @@ public class ValueCountIT extends ESIntegTestCase {
                .addAggregation(count("count").field("values"))
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -150,7 +151,7 @@ public class ValueCountIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(count("count").script(new Script("doc['value'].value"))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -163,7 +164,7 @@ public class ValueCountIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(count("count").script(new Script("doc['values'].values"))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -178,7 +179,7 @@ public class ValueCountIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(count("count").script(new Script("doc[s].value", ScriptType.INLINE, null, params))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -193,7 +194,7 @@ public class ValueCountIT extends ESIntegTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(count("count").script(new Script("doc[s].values", ScriptType.INLINE, null, params))).execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);

        ValueCount valueCount = searchResponse.getAggregations().get("count");
        assertThat(valueCount, notNullValue());

@@ -545,7 +545,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                .setSize(size)
                .addSort("str_value", SortOrder.ASC)
                .execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -558,7 +558,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                .addSort("str_value", SortOrder.DESC)
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -575,7 +575,7 @@ public class SimpleSortIT extends ESIntegTestCase {
                .setQuery(matchAllQuery())
                .setSize(size)
                .addSort(new ScriptSortBuilder(new Script("doc['str_value'].value"), "string")).execute().actionGet();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -586,7 +586,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -601,7 +601,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -611,7 +611,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -625,7 +625,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -635,7 +635,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -649,7 +649,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -661,7 +661,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC)
                .execute().actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -675,7 +675,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(i)));

@@ -687,7 +687,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC).execute()
                .actionGet();
        assertHitCount(searchResponse, 10l);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -713,7 +713,7 @@ public class SimpleSortIT extends ESIntegTestCase {
        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).execute()
                .actionGet();

        assertThat(searchResponse.getHits().getTotalHits(), equalTo(10l));
        assertHitCount(searchResponse, 10);
        assertThat(searchResponse.getHits().hits().length, equalTo(size));
        for (int i = 0; i < size; i++) {
            assertThat(searchResponse.getHits().getAt(i).id(), equalTo(Integer.toString(9 - i)));

@@ -36,6 +36,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;

@@ -284,8 +285,8 @@ public class TestSearchContext extends SearchContext {
        return searcher;
    }

    public void setSearcher(ContextIndexSearcher searcher) {
        this.searcher = searcher;
    public void setSearcher(Engine.Searcher searcher) {
        this.searcher = new ContextIndexSearcher(this, searcher);
    }

    @Override

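The hunk above changes the test helper to accept a raw `Engine.Searcher` and perform the `ContextIndexSearcher` wrapping itself. A hedged sketch of the caller-side effect follows; the package of `TestSearchContext` and the surrounding setup are assumptions for illustration, and only the final `setSearcher` call reflects the diff:

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.test.TestSearchContext;

// Hypothetical caller illustrating the new signature.
public final class SetSearcherUsage {
    public static void attach(TestSearchContext context, Engine.Searcher engineSearcher) {
        // Before: the caller had to wrap the searcher itself, e.g.
        //   context.setSearcher(new ContextIndexSearcher(context, engineSearcher));
        // After: pass the raw Engine.Searcher; the context wraps it internally.
        context.setSearcher(engineSearcher);
    }
}

Centralizing the wrapping keeps callers from depending on the ContextIndexSearcher constructor signature.
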
@@ -6,12 +6,13 @@
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
    <listEntry value="1"/>
</listAttribute>
<mapAttribute key="org.eclipse.debug.core.environmentVariables">
    <mapEntry key="ES_HOME" value="${target_home}"/>
</mapAttribute>
<stringAttribute key="org.eclipse.jdt.launching.CLASSPATH_PROVIDER" value="org.eclipse.m2e.launchconfig.classpathProvider"/>
<listAttribute key="org.eclipse.debug.ui.favoriteGroups">
    <listEntry value="org.eclipse.debug.ui.launchGroup.debug"/>
    <listEntry value="org.eclipse.debug.ui.launchGroup.run"/>
</listAttribute>
<booleanAttribute key="org.eclipse.jdt.launching.ATTR_USE_START_ON_FIRST_THREAD" value="true"/>
<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value="org.elasticsearch.bootstrap.Elasticsearch"/>
<stringAttribute key="org.eclipse.jdt.launching.PROGRAM_ARGUMENTS" value="start"/>
<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="elasticsearch"/>
<stringAttribute key="org.eclipse.jdt.launching.SOURCE_PATH_PROVIDER" value="org.eclipse.m2e.launchconfig.sourcepathProvider"/>
<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -ea"/>
<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms256m -Xmx1g -Djava.awt.headless=true -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=logs/heapdump.hprof -Delasticsearch -Des.foreground=yes -ea -Des.path.home=target/eclipse_run -Des.security.manager.enabled=false"/>
</launchConfiguration>

@@ -29,7 +29,7 @@ my $Issue_URL = "http://github.com/${User_Repo}issues/";

my @Groups = qw(
    breaking deprecation feature
    enhancement bug regression build doc test
    enhancement bug regression upgrade build doc test
);
my %Group_Labels = (
    breaking => 'Breaking changes',

@@ -39,8 +39,9 @@ my %Group_Labels = (
    feature => 'New features',
    enhancement => 'Enhancements',
    bug => 'Bug fixes',
    regression => 'Regression',
    regression => 'Regressions',
    test => 'Tests',
    upgrade => 'Upgrades',
    other => 'NOT CLASSIFIED',
);

@@ -23,7 +23,7 @@
#
# python3 ./dev-tools/prepare-release.py
#
# Note: Ensure the script is run from the root directory
# Note: Ensure the script is run from the elasticsearch top level directory
#

import fnmatch

@@ -76,6 +76,11 @@ gpgcheck=1
gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
enabled=1

To smoke-test the release please run:

python3 -B ./dev-tools/smoke_test_rc.py --version %(version)s --hash %(hash)s --plugins license,shield,watcher

NOTE: this script requires JAVA_HOME to point to a Java 7 Runtime

[1] https://github.com/elastic/elasticsearch/commit/%(hash)s
[2] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/zip/elasticsearch/%(version)s/elasticsearch-%(version)s.zip

@@ -83,12 +88,14 @@ enabled=1
[4] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/rpm/elasticsearch/%(version)s/elasticsearch-%(version)s.rpm
[5] http://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/elasticsearch/distribution/deb/elasticsearch/%(version)s/elasticsearch-%(version)s.deb
"""

def run(command, env_vars=None):
VERBOSE=True
def run(command, env_vars=None, verbose=VERBOSE):
    if env_vars:
        for key, value in env_vars.items():
            os.putenv(key, value)
    if os.system('%s' % (command)):
    if not verbose:
        command = '%s >> /dev/null 2>&1' % (command)
    if os.system(command):
        raise RuntimeError(' FAILED: %s' % (command))

def ensure_checkout_is_clean():

@@ -181,16 +188,20 @@ if __name__ == "__main__":
                        help='Only runs a maven install to skip the remote deployment step')
    parser.add_argument('--gpg-key', '-k', dest='gpg_key', default="D88E42B4",
                        help='Allows you to specify a different gpg_key to be used instead of the default release key')
    parser.add_argument('--verbose', '-b', dest='verbose', action='store_true',
                        help='Runs the script in verbose mode')
    parser.set_defaults(deploy=False)
    parser.set_defaults(skip_doc_check=False)
    parser.set_defaults(push=False)
    parser.set_defaults(install_only=False)
    parser.set_defaults(verbose=False)
    args = parser.parse_args()
    install_and_deploy = args.deploy
    skip_doc_check = args.skip_doc_check
    push = args.push
    gpg_key = args.gpg_key
    install_only = args.install_only
    VERBOSE = args.verbose

    ensure_checkout_is_clean()
    release_version = find_release_version()

@@ -269,5 +280,14 @@ if __name__ == "__main__":
    """)
    print('NOTE: Running s3cmd might require you to create a config file with your credentials, if the s3cmd does not support supplying them via the command line!')
    print('*** Once the release is deployed and published send out the following mail to dev@elastic.co:')
    print(MAIL_TEMPLATE % ({'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version}))
    string_format_dict = {'version' : release_version, 'hash': shortHash, 'major_minor_version' : major_minor_version}
    print(MAIL_TEMPLATE % string_format_dict)

    print('To publish the release and the repo on S3 execute the following commands:')
    print('   s3cmd cp --recursive s3://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/repos/elasticsearch/%(major_minor_version)s/ s3://packages.elasticsearch.org/elasticsearch/%(major_minor_version)s' % string_format_dict)
    print('   s3cmd cp --recursive s3://download.elasticsearch.org/elasticsearch/staging/%(version)s-%(hash)s/org/ s3://download.elasticsearch.org/elasticsearch/release/org' % string_format_dict)
    print('Now go ahead and tag the release:')
    print('   git tag -a v%(version)s %(hash)s' % string_format_dict)
    print('   git push origin v%(version)s' % string_format_dict)

@@ -0,0 +1,282 @@
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Smoke-tests a release candidate
#
# 1. Downloads the tar.gz, deb, RPM and zip file from the staging URL
# 2. Verifies its sha1 hashes and GPG signatures against the release key
# 3. Installs all official plugins
# 4. Starts one node for tar.gz and zip packages and checks:
#    -- if it runs with Java 1.7
#    -- if the build hash given is the one that is returned by the status response
#    -- if the build is a release version and not a snapshot version
#    -- if all plugins are loaded
#    -- if the status response returns the correct version
#
# USAGE:
#
# python3 -B ./dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47
#
# to also test other plugins try running
#
# python3 -B ./dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47 --plugins license,shield,watcher
#
# Note: Ensure the script is run from the elasticsearch top level directory
#
# For testing a release from sonatype try this:
#
# python3 -B dev-tools/smoke_test_rc.py --version 2.0.0-beta1 --hash bfa3e47 --fetch_url https://oss.sonatype.org/content/repositories/releases/
#

import argparse
import tempfile
import os
import signal
import shutil
import urllib
import urllib.request
import hashlib
import time
import socket
import json
import base64

from prepare_release_candidate import run
from http.client import HTTPConnection

DEFAULT_PLUGINS = ["analysis-icu",
                   "analysis-kuromoji",
                   "analysis-phonetic",
                   "analysis-smartcn",
                   "analysis-stempel",
                   "cloud-aws",
                   "cloud-azure",
                   "cloud-gce",
                   "delete-by-query",
                   "discovery-multicast",
                   "lang-javascript",
                   "lang-python",
                   "mapper-murmur3",
                   "mapper-size"]

try:
    JAVA_HOME = os.environ['JAVA_HOME']
except KeyError:
    raise RuntimeError("""
    Please set JAVA_HOME in the env before running release tool
    On OSX use: export JAVA_HOME=`/usr/libexec/java_home -v '1.7*'`""")

def java_exe():
    path = JAVA_HOME
    return 'export JAVA_HOME="%s" PATH="%s/bin:$PATH" JAVACMD="%s/bin/java"' % (path, path, path)

def verify_java_version(version):
    s = os.popen('%s; java -version 2>&1' % java_exe()).read()
    if ' version "%s.' % version not in s:
        raise RuntimeError('got wrong version for java %s:\n%s' % (version, s))

def sha1(file):
    with open(file, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()

def read_fully(file):
    with open(file, encoding='utf-8') as f:
        return f.read()

def wait_for_node_startup(host='127.0.0.1', port=9200, timeout=60, header={}):
    print(' Waiting until node becomes available for at most %s seconds' % timeout)
    for _ in range(timeout):
        conn = HTTPConnection(host=host, port=port, timeout=timeout)
        try:
            time.sleep(1)
            conn.request('GET', '', headers=header)
            res = conn.getresponse()
            if res.status == 200:
                return True
        except socket.error as e:
            pass
            # that is ok it might not be there yet
        finally:
            conn.close()
    return False

def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS, verbose=False):
    print('Downloading and verifying release %s from %s' % (version, base_url))
    tmp_dir = tempfile.mkdtemp()
    try:
        downloaded_files = []
        print(' ' + '*' * 80)
        for file in files:
            name = os.path.basename(file)
            print(' Smoketest file: %s' % name)
            url = '%s/%s' % (base_url, file)
            print('  Downloading %s' % (url))
            artifact_path = os.path.join(tmp_dir, file)
            downloaded_files.append(artifact_path)
            current_artifact_dir = os.path.dirname(artifact_path)
            os.makedirs(current_artifact_dir)
            urllib.request.urlretrieve(url, os.path.join(tmp_dir, file))
            sha1_url = ''.join([url, '.sha1'])
            checksum_file = artifact_path + ".sha1"
            print('  Downloading %s' % (sha1_url))
            urllib.request.urlretrieve(sha1_url, checksum_file)
            print('  Verifying checksum %s' % (checksum_file))
            expected = read_fully(checksum_file)
            actual = sha1(artifact_path)
            if expected != actual:
                raise RuntimeError('sha1 hash for %s doesn\'t match %s != %s' % (name, expected, actual))
            gpg_url = ''.join([url, '.asc'])
            gpg_file = artifact_path + ".asc"
            print('  Downloading %s' % (gpg_url))
            urllib.request.urlretrieve(gpg_url, gpg_file)
            print('  Verifying gpg signature %s' % (gpg_file))
            # here we create a temp gpg home where we download the release key as the only key into
            # when we verify the signature it will fail if the signed key is not in the keystore and that
            # way we keep the executing host unmodified since we don't have to import the key into the default keystore
            gpg_home_dir = os.path.join(current_artifact_dir, "gpg_home_dir")
            os.makedirs(gpg_home_dir, 0o700)
            run('gpg --homedir %s --keyserver pgp.mit.edu --recv-key D88E42B4' % gpg_home_dir, verbose=verbose)
            run('cd %s && gpg --homedir %s --verify %s' % (current_artifact_dir, gpg_home_dir, os.path.basename(gpg_file)), verbose=verbose)
            print(' ' + '*' * 80)
            print()
        smoke_test_release(version, downloaded_files, hash, plugins, verbose=verbose)
        print(' SUCCESS')
    finally:
        shutil.rmtree(tmp_dir)

def smoke_test_release(release, files, expected_hash, plugins, verbose=False):
    for release_file in files:
        if not os.path.isfile(release_file):
            raise RuntimeError('Smoketest failed missing file %s' % (release_file))
        tmp_dir = tempfile.mkdtemp()
        if release_file.endswith('tar.gz'):
            run('tar -xzf %s -C %s' % (release_file, tmp_dir), verbose=verbose)
        elif release_file.endswith('zip'):
            run('unzip %s -d %s' % (release_file, tmp_dir), verbose=verbose)
        else:
            print(' Skip SmokeTest for [%s]' % release_file)
            continue  # nothing to do here
        es_run_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/elasticsearch')
        print(' Smoke testing package [%s]' % release_file)
        es_plugin_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/plugin')
        plugin_names = {}
        for plugin in plugins:
            print(' Install plugin [%s]' % (plugin))
            run('%s; %s -Des.plugins.staging=true %s %s' % (java_exe(), es_plugin_path, 'install', plugin), verbose=verbose)
            plugin_names[plugin] = True
        if 'shield' in plugin_names:
            headers = { 'Authorization' : 'Basic %s' % base64.b64encode(b"es_admin:foobar").decode("UTF-8") }
            es_shield_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'bin/shield/esusers')
            print(" Install dummy shield user")
            run('%s; %s useradd es_admin -r admin -p foobar' % (java_exe(), es_shield_path), verbose=verbose)
        else:
            headers = {}
        print(' Starting elasticsearch daemon from [%s]' % os.path.join(tmp_dir, 'elasticsearch-%s' % release))
        try:
            run('%s; %s -Des.node.name=smoke_tester -Des.cluster.name=prepare_release -Des.script.inline=on -Des.script.indexed=on -Des.repositories.url.allowed_urls=http://snapshot.test* %s -Des.pidfile=%s'
                % (java_exe(), es_run_path, '-d', os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'es-smoke.pid')), verbose=verbose)
            conn = HTTPConnection(host='127.0.0.1', port=9200, timeout=20)
            if not wait_for_node_startup(header=headers):
                print("elasticsearch logs:")
                print('*' * 80)
                logs = read_fully(os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'logs/prepare_release.log'))
                print(logs)
                print('*' * 80)
                raise RuntimeError('server didn\'t start up')
            try:  # we now get / and /_nodes to fetch basic infos like hashes etc and the installed plugins
                conn.request('GET', '', headers=headers)
                res = conn.getresponse()
                if res.status == 200:
                    version = json.loads(res.read().decode("utf-8"))['version']
                    if release != version['number']:
                        raise RuntimeError('Expected version [%s] but was [%s]' % (release, version['number']))
                    if version['build_snapshot']:
                        raise RuntimeError('Expected non snapshot version')
                    if not version['build_hash'].strip().startswith(expected_hash):
                        raise RuntimeError('HEAD hash does not match expected [%s] but got [%s]' % (expected_hash, version['build_hash']))
                    print(' Verify if plugins are listed in _nodes')
                    conn.request('GET', '/_nodes?plugin=true&pretty=true', headers=headers)
                    res = conn.getresponse()
                    if res.status == 200:
                        nodes = json.loads(res.read().decode("utf-8"))['nodes']
                        for _, node in nodes.items():
                            node_plugins = node['plugins']
                            for node_plugin in node_plugins:
                                if not plugin_names.get(node_plugin['name'].strip(), False):
                                    raise RuntimeError('Unexpected plugin %s' % node_plugin['name'])
                                del plugin_names[node_plugin['name']]
                        if plugin_names:
                            raise RuntimeError('Plugins not loaded %s' % list(plugin_names.keys()))
                    else:
                        raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
                else:
                    raise RuntimeError('Expected HTTP 200 but got %s' % res.status)
            finally:
                conn.close()
        finally:
            pid_path = os.path.join(tmp_dir, 'elasticsearch-%s' % (release), 'es-smoke.pid')
            if os.path.exists(pid_path):  # try reading the pid and kill the node
                pid = int(read_fully(pid_path))
                os.kill(pid, signal.SIGKILL)
            shutil.rmtree(tmp_dir)
        print(' ' + '*' * 80)
        print()

def parse_list(string):
    return [x.strip() for x in string.split(',')]

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='SmokeTests a Release Candidate from S3 staging repo')
    parser.add_argument('--version', '-v', dest='version', default=None,
                        help='The Elasticsearch Version to smoke-test', required=True)
    parser.add_argument('--hash', '-s', dest='hash', default=None, required=True,
                        help='The sha1 short hash of the git commit to smoketest')
    parser.add_argument('--plugins', '-p', dest='plugins', default=[], required=False, type=parse_list,
                        help='A list of additional plugins to smoketest')
    parser.add_argument('--verbose', '-b', dest='verbose', action='store_true',
                        help='Runs the script in verbose mode')
    parser.add_argument('--fetch_url', '-u', dest='url', default=None,
                        help='Fetches the release artifacts from this base URL instead of the default staging URL')
    parser.set_defaults(hash=None)
    parser.set_defaults(plugins=[])
    parser.set_defaults(version=None)
    parser.set_defaults(verbose=False)
    parser.set_defaults(url=None)
    args = parser.parse_args()
    plugins = args.plugins
    version = args.version
    hash = args.hash
    verbose = args.verbose
    url = args.url
    files = [
        'org/elasticsearch/distribution/tar/elasticsearch/2.0.0-beta1/elasticsearch-2.0.0-beta1.tar.gz',
        'org/elasticsearch/distribution/zip/elasticsearch/2.0.0-beta1/elasticsearch-2.0.0-beta1.zip',
        'org/elasticsearch/distribution/deb/elasticsearch/2.0.0-beta1/elasticsearch-2.0.0-beta1.deb',
        'org/elasticsearch/distribution/rpm/elasticsearch/2.0.0-beta1/elasticsearch-2.0.0-beta1.rpm'
    ]
    verify_java_version('1.7')
    if url:
        download_url = url
    else:
        download_url = '%s/%s-%s' % ('http://download.elasticsearch.org/elasticsearch/staging', version, hash)
    download_and_verify(version, hash, files, download_url, plugins=DEFAULT_PLUGINS + plugins, verbose=verbose)

@@ -219,6 +219,30 @@
<extract-pid file="@{es.pidfile}" property="integ.pid"/>
<echo>Shutting down external node PID ${integ.pid}</echo>
<!-- verify with jps that this actually is the correct pid.
See if we can find the line "pid org.elasticsearch.bootstrap.Elasticsearch" in the output of jps -l. -->
<local name="jps.pidline"/>
<local name="jps.executable"/>
<local name="environment"/>
<property environment="environment"/>
<property name="jps.executable" location="${environment.JAVA_HOME}/bin/jps"/>
<exec executable="${jps.executable}" failonerror="true">
    <arg value="-l"/>
    <redirector outputproperty="jps.pidline">
        <outputfilterchain>
            <linecontains>
                <contains value="${integ.pid} org.elasticsearch.bootstrap.Elasticsearch"/>
            </linecontains>
        </outputfilterchain>
    </redirector>
</exec>
<fail
    message="pid file at @{es.pidfile} is ${integ.pid} but jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch and this pid.
Did you run mvn clean? Maybe an old pid file is still lying around.">
    <condition>
        <equals arg1="${jps.pidline}" arg2=""/>
    </condition>
</fail>

<exec executable="taskkill" failonerror="true" osfamily="winnt">
    <arg value="/F"/>

@@ -271,8 +295,6 @@
    <startup-elasticsearch/>
</target>

<!-- TODO, for some more safety, add back some of the old jps logic
and verify the pid is really an ES process! (fail otherwise) -->
<target name="stop-external-cluster" if="integ.pidfile.exists">
    <stop-node/>
</target>

@@ -0,0 +1 @@
d27c24204c5e507b16fec01006b3d0f1ec42aed4

@@ -1 +0,0 @@
9f2785d7184b97d005a44241ccaf980f43b9ccdb

@@ -90,6 +90,15 @@
        </execution>
    </executions>
</plugin>
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-eclipse-plugin</artifactId>
    <configuration>
        <!-- Many of the modules in this build have the artifactId "elasticsearch",
        which breaks importing into Eclipse without this. -->
        <projectNameTemplate>[groupId].[artifactId]</projectNameTemplate>
    </configuration>
</plugin>
</plugins>

<pluginManagement>

@@ -121,7 +121,10 @@ case `uname` in
;;
esac

export HOSTNAME=`hostname -s`
# full hostname passed through cut for portability on systems that do not support hostname -s
# export on separate line for shells that do not support combining definition and export
HOSTNAME=`hostname | cut -d. -f1`
export HOSTNAME

# manual parsing to find out, if process should be detached
daemonized=`echo $* | grep -E -- '(^-d |-d$| -d |--daemonize$|--daemonize )'`

@@ -103,6 +103,9 @@ if [ -e "$CONF_FILE" ]; then
esac
fi

export HOSTNAME=`hostname -s`
# full hostname passed through cut for portability on systems that do not support hostname -s
# export on separate line for shells that do not support combining definition and export
HOSTNAME=`hostname | cut -d. -f1`
export HOSTNAME

eval "$JAVA" $JAVA_OPTS $ES_JAVA_OPTS -Xmx64m -Xms16m -Delasticsearch -Des.path.home="\"$ES_HOME\"" $properties -cp "\"$ES_HOME/lib/*\"" org.elasticsearch.plugins.PluginManagerCliParser $args

@ -20,8 +20,10 @@ filters.
|`char_filter` |An optional list of logical / registered names of char
filters.

|`position_offset_gap` |An optional number of positions to increment
between each field value of a field using this analyzer.
|`position_increment_gap` |An optional number of positions to increment
between each field value of a field using this analyzer. Defaults to 100.
100 was chosen because it prevents phrase queries with reasonably large
slops (less than 100) from matching terms across field values.
|=======================================================================

Here is an example:
@ -30,13 +32,13 @@ Here is an example:
--------------------------------------------------
index :
    analysis :
        analyzer :
            myAnalyzer2 :
                type : custom
                tokenizer : myTokenizer1
                filter : [myTokenFilter1, myTokenFilter2]
                char_filter : [my_html]
                position_offset_gap: 256
                position_increment_gap: 256
        tokenizer :
            myTokenizer1 :
                type : standard
@ -353,7 +353,23 @@ occurs, so that the document appears in search results immediately, the
`refresh` parameter can be set to `true`. Setting this option to `true` should
*ONLY* be done after careful thought and verification that it does not lead to
poor performance, both from an indexing and a search standpoint. Note, getting
a document using the get API is completely realtime.
a document using the get API is completely realtime and doesn't require a
refresh.
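
For example, making a freshly indexed document immediately searchable might
look like this (a sketch; the index, type, and document are illustrative):

[source,js]
--------------------------------------------------
PUT /my_index/my_type/1?refresh=true
{
    "title": "Test document"
}
--------------------------------------------------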

[float]
[[index-noop]]
=== Noop Updates

When updating a document using the index API, a new version of the document is
always created even if the document hasn't changed. If this isn't acceptable,
use the `_update` API with `detect_noop` set to `true`. This option isn't
available on the index API because the index API doesn't fetch the old source
and isn't able to compare it against the new source.
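
A noop-detecting update might look like this (a minimal sketch; the index,
type, and field names are illustrative):

[source,js]
--------------------------------------------------
POST /my_index/my_type/1/_update
{
    "doc": {
        "title": "Test document"
    },
    "detect_noop": true
}
--------------------------------------------------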

There isn't a hard and fast rule about when noop updates aren't acceptable.
It's a combination of many factors, like how frequently your data source
sends updates that are actually noops and how many queries per second
Elasticsearch runs on the shard receiving the updates.

[float]
[[timeout]]
@ -28,7 +28,7 @@ The following mapping parameters are common to some or all field datatypes:
* <<multi-fields,`fields`>>
* <<norms,`norms`>>
* <<null-value,`null_value`>>
* <<position-offset-gap,`position_offset_gap`>>
* <<position-increment-gap,`position_increment_gap`>>
* <<properties,`properties`>>
* <<search-analyzer,`search_analyzer`>>
* <<similarity,`similarity`>>
@ -78,7 +78,7 @@ include::params/norms.asciidoc[]

include::params/null-value.asciidoc[]

include::params/position-offset-gap.asciidoc[]
include::params/position-increment-gap.asciidoc[]

include::params/precision-step.asciidoc[]
@ -1,5 +1,5 @@
[[position-offset-gap]]
=== `position_offset_gap`
[[position-increment-gap]]
=== `position_increment_gap`

<<mapping-index,Analyzed>> string fields take term <<index-options,positions>>
into account, in order to be able to support
@ -30,7 +30,7 @@ GET /my_index/groups/_search
// AUTOSENSE
<1> This phrase query matches our document, even though `Abraham` and `Lincoln` are in separate strings.

The `position_offset_gap` can introduce a fake gap between each array element. For instance:
The `position_increment_gap` can introduce a fake gap between each array element. For instance:

[source,js]
--------------------------------------------------
@ -41,7 +41,7 @@ PUT my_index
      "properties": {
        "names": {
          "type": "string",
          "position_offset_gap": 50 <1>
          "position_increment_gap": 50 <1>
        }
      }
    }
@ -67,7 +67,7 @@ GET /my_index/groups/_search
    last term in the previous array element.
<2> The phrase query no longer matches our document.

TIP: The `position_offset_gap` setting is allowed to have different settings
TIP: The `position_increment_gap` setting is allowed to have different settings
for fields of the same name in the same index. Its value can be updated on
existing fields using the <<indices-put-mapping,PUT mapping API>>.
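
For instance, the value could be changed with a request along these lines (a
sketch, reusing the `groups` example from above):

[source,js]
--------------------------------------------------
PUT /my_index/_mapping/groups
{
    "properties": {
        "names": {
            "type": "string",
            "position_increment_gap": 100
        }
    }
}
--------------------------------------------------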
@ -141,10 +141,15 @@ Defaults depend on the <<mapping-index,`index`>> setting:
  values. Defaults to `null`, which means the field is treated as missing.
  If the field is `analyzed`, the `null_value` will also be analyzed.

<<position-offset-gap,`position_offset_gap`>>::
<<position-increment-gap,`position_increment_gap`>>::

  The number of fake term positions which should be inserted between
  each element of an array of strings. Defaults to 0.
  The number of fake term positions which should be inserted between each
  element of an array of strings. Defaults to the `position_increment_gap`
  configured on the analyzer, which defaults to 100. 100 was chosen because it
  prevents phrase queries with reasonably large slops (less than 100) from
  matching terms across field values.

<<mapping-store,`store`>>::
@ -166,5 +171,3 @@ Defaults depend on the <<mapping-index,`index`>> setting:

  Whether term vectors should be stored for an <<mapping-index,`analyzed`>>
  field. Defaults to `no`.
@ -13,13 +13,6 @@ latest 1.x version of Elasticsearch first, in order to upgrade your indices or
to delete the old indices. Elasticsearch will not start in the presence of old
indices.

[float]
=== Network binds to localhost only

Elasticsearch now binds to the loopback interface by default (usually
`127.0.0.1` or `::1`). The `network.host` setting can be specified to change
this behavior.

[float]
=== Elasticsearch migration plugin
@ -29,6 +22,8 @@ Elasticsearch 2.0. Please install and run the plugin *before* upgrading.

include::migrate_2_0/removals.asciidoc[]

include::migrate_2_0/network.asciidoc[]

include::migrate_2_0/striping.asciidoc[]

include::migrate_2_0/mapping.asciidoc[]
@ -55,4 +50,4 @@ include::migrate_2_0/settings.asciidoc[]

include::migrate_2_0/stats.asciidoc[]

include::migrate_2_0/java.asciidoc[]
include::migrate_2_0/java.asciidoc[]
@ -384,7 +384,15 @@ The `compress` and `compress_threshold` options have been removed from the
default. If you would like to increase compression levels, use the new
<<index-codec,`index.codec: best_compression`>> setting instead.

==== position_offset_gap

The `position_offset_gap` option has been renamed to `position_increment_gap`.
This was done to clear up confusion: Elasticsearch's `position_increment_gap`
now maps directly to Lucene's `position_increment_gap`.

The default `position_increment_gap` is now 100. Indices created in Elasticsearch
2.0.0 will default to 100, while indices created before that will continue to
use the old default of 0. This was done to prevent phrase queries from
unexpectedly matching across different values of the same field. Specifically,
100 was chosen so that phrase queries with slops up to 99 match only within a
single value of a field.
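
If the old behavior is required, the gap can be set back explicitly in the
field mapping (a sketch; the index, type, and field names are illustrative):

[source,js]
--------------------------------------------------
PUT my_index
{
    "mappings": {
        "my_type": {
            "properties": {
                "names": {
                    "type": "string",
                    "position_increment_gap": 0
                }
            }
        }
    }
}
--------------------------------------------------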
@ -0,0 +1,42 @@
=== Network changes

==== Bind to localhost

Elasticsearch 2.x will only bind to localhost by default. It will try to bind
to both 127.0.0.1 (IPv4) and [::1] (IPv6), but will work happily in
environments where only IPv4 or IPv6 is available. This change prevents
Elasticsearch from trying to connect to other nodes on your network unless you
specifically tell it to do so. When moving to production you should configure
the `network.host` parameter, either in the `elasticsearch.yml` config file or
on the command line:

[source,sh]
--------------------
bin/elasticsearch --network.host 192.168.1.5
bin/elasticsearch --network.host _non_loopback_
--------------------
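
The equivalent setting in `elasticsearch.yml` would be (a sketch using the
same illustrative address):

[source,yaml]
--------------------
network.host: 192.168.1.5
--------------------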

The full list of options that `network.host` accepts can be found in
<<modules-network>>.

==== Multicast removed

Multicast has been removed (although it is still
{plugins}/discovery-multicast.html[provided as a plugin] for now). Instead,
and only when bound to localhost, Elasticsearch will use unicast to contact
the first 5 ports in the `transport.tcp.port` range, which defaults to
`9300-9400`.

This preserves the zero-config auto-clustering experience for the developer,
but it means that you will have to provide a list of <<unicast,unicast hosts>>
when moving to production, for instance:

[source,yaml]
---------------------
discovery.zen.ping.unicast.hosts: [ 192.168.1.2, 192.168.1.3 ]
---------------------

You don’t need to list all of the nodes in your cluster as unicast hosts, but
you should specify at least a quorum (majority) of master-eligible nodes. A
big cluster will typically have three dedicated master nodes, in which case we
recommend listing all three of them as unicast hosts.
@ -24,4 +24,4 @@ GET /my_index/_search?scroll=2m

Scroll requests sorted by `_doc` have been optimized to more efficiently resume
from where the previous request stopped, so this will have the same performance
characteristics as the former `scan` search type.
characteristics as the former `scan` search type.
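
A scroll sorted by `_doc` looks like this (a sketch; `my_index` is
illustrative):

[source,js]
--------------------------------------------------
GET /my_index/_search?scroll=2m
{
    "sort": [ "_doc" ]
}
--------------------------------------------------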
@ -1,6 +0,0 @@
[[river]]
= Rivers

Rivers were deprecated in Elasticsearch 1.5 and removed in Elasticsearch 2.0.

See https://www.elastic.co/blog/deprecating_rivers for more details.
@ -1 +0,0 @@
9732a4e80aad23101faa442700c2172a37257c43

@ -0,0 +1 @@
7ff51040bbcc9085dcb9a24a2c2a3cc7ac995988

@ -1 +0,0 @@
b5dc3760021fba0ae67b4f11d37ffa52a4eac4f4

@ -0,0 +1 @@
b0712cc659e72b9da0f5b03872d2476ab4a695f7

@ -1 +0,0 @@
48f0aab551fa9e2eb4c81e2debf40e9fff595405

@ -0,0 +1 @@
31afbe46b65e9933316c7e8dfb8b88dc4b37b6ba

@ -1 +0,0 @@
891e42d29e8f9474f83c050e4ee6a4512d4f4e71

@ -0,0 +1 @@
c9e2593fdf398c5f8906a704db037d17b2de4b2a
@ -16,7 +16,7 @@

<properties>
  <elasticsearch.plugin.classname>org.elasticsearch.plugin.cloud.aws.CloudAwsPlugin</elasticsearch.plugin.classname>
  <amazonaws.version>1.10.0</amazonaws.version>
  <amazonaws.version>1.10.12</amazonaws.version>
  <tests.jvms>1</tests.jvms>
  <tests.rest.suite>cloud_aws</tests.rest.suite>
  <tests.rest.load_packaged>false</tests.rest.load_packaged>
@ -34,7 +34,6 @@ import java.io.InputStream;
import java.net.URL;
import java.util.Date;
import java.util.List;

import org.elasticsearch.common.SuppressForbidden;

/**
@ -579,4 +578,54 @@ public class AmazonS3Wrapper implements AmazonS3 {
    public boolean isRequesterPaysEnabled(String bucketName) throws AmazonServiceException, AmazonClientException {
        return delegate.isRequesterPaysEnabled(bucketName);
    }

    @Override
    public ObjectListing listNextBatchOfObjects(ListNextBatchOfObjectsRequest listNextBatchOfObjectsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfObjects(listNextBatchOfObjectsRequest);
    }

    @Override
    public VersionListing listNextBatchOfVersions(ListNextBatchOfVersionsRequest listNextBatchOfVersionsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfVersions(listNextBatchOfVersionsRequest);
    }

    @Override
    public Owner getS3AccountOwner(GetS3AccountOwnerRequest getS3AccountOwnerRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getS3AccountOwner(getS3AccountOwnerRequest);
    }

    @Override
    public BucketLoggingConfiguration getBucketLoggingConfiguration(GetBucketLoggingConfigurationRequest getBucketLoggingConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketLoggingConfiguration(getBucketLoggingConfigurationRequest);
    }

    @Override
    public BucketVersioningConfiguration getBucketVersioningConfiguration(GetBucketVersioningConfigurationRequest getBucketVersioningConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketVersioningConfiguration(getBucketVersioningConfigurationRequest);
    }

    @Override
    public BucketLifecycleConfiguration getBucketLifecycleConfiguration(GetBucketLifecycleConfigurationRequest getBucketLifecycleConfigurationRequest) {
        return delegate.getBucketLifecycleConfiguration(getBucketLifecycleConfigurationRequest);
    }

    @Override
    public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(GetBucketCrossOriginConfigurationRequest getBucketCrossOriginConfigurationRequest) {
        return delegate.getBucketCrossOriginConfiguration(getBucketCrossOriginConfigurationRequest);
    }

    @Override
    public BucketTaggingConfiguration getBucketTaggingConfiguration(GetBucketTaggingConfigurationRequest getBucketTaggingConfigurationRequest) {
        return delegate.getBucketTaggingConfiguration(getBucketTaggingConfigurationRequest);
    }

    @Override
    public BucketNotificationConfiguration getBucketNotificationConfiguration(GetBucketNotificationConfigurationRequest getBucketNotificationConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketNotificationConfiguration(getBucketNotificationConfigurationRequest);
    }

    @Override
    public BucketReplicationConfiguration getBucketReplicationConfiguration(GetBucketReplicationConfigurationRequest getBucketReplicationConfigurationRequest) throws AmazonServiceException, AmazonClientException {
        return delegate.getBucketReplicationConfiguration(getBucketReplicationConfigurationRequest);
    }
}
@ -1,6 +1,6 @@
{
  "delete_by_query": {
    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/plugins/master/plugins-delete-by-query.html",
    "methods": ["DELETE"],
    "url": {
      "path": "/{index}/_query",
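
Given the method and path declared in this spec, a request to the
delete-by-query plugin would look like this (a sketch; the index and query are
illustrative):

[source,js]
--------------------------------------------------
DELETE /my_index/_query
{
    "query": {
        "match": { "title": "test" }
    }
}
--------------------------------------------------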
@ -354,7 +354,7 @@
    <artifactId>joda-time</artifactId>
    <!-- joda 2.0 moved to using volatile fields for datetime -->
    <!-- When updating to a new version, make sure to update our copy of BaseDateTime -->
    <version>2.8</version>
    <version>2.8.2</version>
</dependency>
<dependency>
    <groupId>org.joda</groupId>
@ -982,7 +982,7 @@
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-eclipse-plugin</artifactId>
    <version>2.9</version>
    <version>2.10</version>
    <configuration>
        <buildOutputDirectory>eclipse-build</buildOutputDirectory>
        <downloadSources>true</downloadSources>
@ -20,6 +20,10 @@
      "master_timeout": {
        "type" : "time",
        "description" : "Specify timeout for connection to master"
      },
      "update_all_types": {
        "type": "boolean",
        "description": "Whether to update the mapping for all fields with the same name across all types or not"
      }
    }
  },
@ -38,6 +38,10 @@
        "options" : ["open","closed","none","all"],
        "default" : "open",
        "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both."
      },
      "update_all_types": {
        "type": "boolean",
        "description": "Whether to update the mapping for all fields with the same name across all types or not"
      }
    }
  },
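
As declared here, `update_all_types` is a boolean query-string parameter, so a
put-mapping call using it might look like this (a sketch; the index, type, and
field names are illustrative):

[source,js]
--------------------------------------------------
PUT /my_index/_mapping/my_type?update_all_types=true
{
    "properties": {
        "title": {
            "type": "string"
        }
    }
}
--------------------------------------------------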
@ -1,6 +1,6 @@
{
  "indices.shard_stores": {
    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shard-stores.html",
    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/indices-shards-stores.html",
    "methods": ["GET"],
    "url": {
      "path": "/_shard_stores",