Flush API: remove refresh flag
The refresh flag in the flush API is problematic, since the set of shards that a refresh is allowed to execute on differs from the set of shards that flush executes on. To perform a flush followed by a refresh, the two should be executed as separate API calls when needed. Closes #3689
This commit is contained in:
parent
4298c50119
commit
7cc48c8e87
|
@ -21,8 +21,6 @@ The flush API accepts the following request parameters:
|
|||
[cols="<,<",options="header",]
|
||||
|=======================================================================
|
||||
|Name |Description
|
||||
|`refresh` |Should a refresh be performed after the flush. Defaults to
|
||||
`false`.
|
||||
|=======================================================================
|
||||
|
||||
[float]
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.action.admin.indices.flush;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -38,10 +39,7 @@ import java.io.IOException;
|
|||
*/
|
||||
public class FlushRequest extends BroadcastOperationRequest<FlushRequest> {
|
||||
|
||||
private boolean refresh = false;
|
||||
|
||||
private boolean force = false;
|
||||
|
||||
private boolean full = false;
|
||||
|
||||
FlushRequest() {
|
||||
|
@ -56,21 +54,6 @@ public class FlushRequest extends BroadcastOperationRequest<FlushRequest> {
|
|||
super(indices);
|
||||
}
|
||||
|
||||
/**
|
||||
* Should a refresh be performed once the flush is done. Defaults to <tt>false</tt>.
|
||||
*/
|
||||
public boolean refresh() {
|
||||
return this.refresh;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should a refresh be performed once the flush is done. Defaults to <tt>false</tt>.
|
||||
*/
|
||||
public FlushRequest refresh(boolean refresh) {
|
||||
this.refresh = refresh;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should a "full" flush be performed.
|
||||
*/
|
||||
|
@ -104,7 +87,9 @@ public class FlushRequest extends BroadcastOperationRequest<FlushRequest> {
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeBoolean(refresh);
|
||||
if (out.getVersion().onOrBefore(Version.V_0_90_3)) {
|
||||
out.writeBoolean(false); // refresh flag
|
||||
}
|
||||
out.writeBoolean(full);
|
||||
out.writeBoolean(force);
|
||||
}
|
||||
|
@ -112,7 +97,9 @@ public class FlushRequest extends BroadcastOperationRequest<FlushRequest> {
|
|||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
refresh = in.readBoolean();
|
||||
if (in.getVersion().onOrBefore(Version.V_0_90_3)) {
|
||||
in.readBoolean(); // refresh flag
|
||||
}
|
||||
full = in.readBoolean();
|
||||
force = in.readBoolean();
|
||||
}
|
||||
|
|
|
@ -33,11 +33,6 @@ public class FlushRequestBuilder extends BroadcastOperationRequestBuilder<FlushR
|
|||
super((InternalIndicesAdminClient) indicesClient, new FlushRequest());
|
||||
}
|
||||
|
||||
public FlushRequestBuilder setRefresh(boolean refresh) {
|
||||
request.refresh(refresh);
|
||||
return this;
|
||||
}
|
||||
|
||||
public FlushRequestBuilder setFull(boolean full) {
|
||||
request.full(full);
|
||||
return this;
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.action.admin.indices.flush;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -30,7 +31,6 @@ import java.io.IOException;
|
|||
*/
|
||||
class ShardFlushRequest extends BroadcastShardOperationRequest {
|
||||
|
||||
private boolean refresh;
|
||||
private boolean full;
|
||||
private boolean force;
|
||||
|
||||
|
@ -39,15 +39,10 @@ class ShardFlushRequest extends BroadcastShardOperationRequest {
|
|||
|
||||
public ShardFlushRequest(String index, int shardId, FlushRequest request) {
|
||||
super(index, shardId, request);
|
||||
this.refresh = request.refresh();
|
||||
this.full = request.full();
|
||||
this.force = request.force();
|
||||
}
|
||||
|
||||
public boolean refresh() {
|
||||
return this.refresh;
|
||||
}
|
||||
|
||||
public boolean full() {
|
||||
return this.full;
|
||||
}
|
||||
|
@ -59,7 +54,9 @@ class ShardFlushRequest extends BroadcastShardOperationRequest {
|
|||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
refresh = in.readBoolean();
|
||||
if (in.getVersion().onOrBefore(Version.V_0_90_3)) {
|
||||
in.readBoolean(); // refresh flag
|
||||
}
|
||||
full = in.readBoolean();
|
||||
force = in.readBoolean();
|
||||
}
|
||||
|
@ -67,7 +64,9 @@ class ShardFlushRequest extends BroadcastShardOperationRequest {
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeBoolean(refresh);
|
||||
if (out.getVersion().onOrBefore(Version.V_0_90_3)) {
|
||||
out.writeBoolean(false); // refresh flag
|
||||
}
|
||||
out.writeBoolean(full);
|
||||
out.writeBoolean(force);
|
||||
}
|
||||
|
|
|
@ -111,7 +111,7 @@ public class TransportFlushAction extends TransportBroadcastOperationAction<Flus
|
|||
@Override
|
||||
protected ShardFlushResponse shardOperation(ShardFlushRequest request) throws ElasticSearchException {
|
||||
IndexShard indexShard = indicesService.indexServiceSafe(request.index()).shardSafe(request.shardId());
|
||||
indexShard.flush(new Engine.Flush().refresh(request.refresh()).type(request.full() ? Engine.Flush.Type.NEW_WRITER : Engine.Flush.Type.COMMIT_TRANSLOG).force(request.force()));
|
||||
indexShard.flush(new Engine.Flush().type(request.full() ? Engine.Flush.Type.NEW_WRITER : Engine.Flush.Type.COMMIT_TRANSLOG).force(request.force()));
|
||||
return new ShardFlushResponse(request.index(), request.shardId());
|
||||
}
|
||||
|
||||
|
|
|
@ -224,28 +224,12 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
|
|||
}
|
||||
|
||||
private Type type = Type.COMMIT_TRANSLOG;
|
||||
private boolean refresh = false;
|
||||
private boolean force = false;
|
||||
/**
|
||||
* Should the flush operation wait if there is an ongoing flush operation.
|
||||
*/
|
||||
private boolean waitIfOngoing = false;
|
||||
|
||||
/**
|
||||
* Should a refresh be performed after flushing. Defaults to <tt>false</tt>.
|
||||
*/
|
||||
public boolean refresh() {
|
||||
return this.refresh;
|
||||
}
|
||||
|
||||
/**
|
||||
* Should a refresh be performed after flushing. Defaults to <tt>false</tt>.
|
||||
*/
|
||||
public Flush refresh(boolean refresh) {
|
||||
this.refresh = refresh;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Type type() {
|
||||
return this.type;
|
||||
}
|
||||
|
@ -278,7 +262,7 @@ public interface Engine extends IndexShardComponent, CloseableComponent {
|
|||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "type[" + type + "], refresh[" + refresh + "], force[" + force + "]";
|
||||
return "type[" + type + "], force[" + force + "]";
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.engine;
|
||||
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -29,4 +30,9 @@ public class FlushNotAllowedEngineException extends EngineException {
|
|||
public FlushNotAllowedEngineException(ShardId shardId, String msg) {
|
||||
super(shardId, msg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestStatus status() {
|
||||
return RestStatus.SERVICE_UNAVAILABLE;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -67,7 +67,6 @@ public class RestFlushAction extends BaseRestHandler {
|
|||
operationThreading = BroadcastOperationThreading.THREAD_PER_SHARD;
|
||||
}
|
||||
flushRequest.operationThreading(operationThreading);
|
||||
flushRequest.refresh(request.paramAsBoolean("refresh", flushRequest.refresh()));
|
||||
flushRequest.full(request.paramAsBoolean("full", flushRequest.full()));
|
||||
flushRequest.force(request.paramAsBoolean("force", flushRequest.force()));
|
||||
client.admin().indices().flush(flushRequest, new ActionListener<FlushResponse>() {
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.google.common.collect.Iterators;
|
|||
import org.apache.lucene.util.AbstractRandomizedTest.IntegrationTests;
|
||||
import org.elasticsearch.action.ActionRequestBuilder;
|
||||
import org.elasticsearch.action.ActionResponse;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
|
||||
|
@ -53,6 +54,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.indices.IndexAlreadyExistsException;
|
||||
import org.elasticsearch.indices.IndexMissingException;
|
||||
import org.elasticsearch.indices.IndexTemplateMissingException;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.junit.*;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -319,10 +321,25 @@ public abstract class AbstractSharedClusterTest extends ElasticsearchTestCase {
|
|||
return actionGet;
|
||||
}
|
||||
|
||||
protected void flushAndRefresh() {
|
||||
flush(true);
|
||||
refresh();
|
||||
}
|
||||
|
||||
protected FlushResponse flush() {
|
||||
return flush(true);
|
||||
}
|
||||
|
||||
protected FlushResponse flush(boolean ignoreNotAllowed) {
|
||||
waitForRelocation();
|
||||
FlushResponse actionGet = client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
assertNoFailures(actionGet);
|
||||
FlushResponse actionGet = client().admin().indices().prepareFlush().execute().actionGet();
|
||||
if (ignoreNotAllowed) {
|
||||
for (ShardOperationFailedException failure : actionGet.getShardFailures()) {
|
||||
assertThat("unexpected flush failure " + failure.reason(), failure.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
|
||||
}
|
||||
} else {
|
||||
assertNoFailures(actionGet);
|
||||
}
|
||||
return actionGet;
|
||||
}
|
||||
|
||||
|
@ -387,7 +404,7 @@ public abstract class AbstractSharedClusterTest extends ElasticsearchTestCase {
|
|||
} else if (rarely()) {
|
||||
client().admin().indices().prepareFlush(index).execute().get();
|
||||
} else if (rarely()) {
|
||||
client().admin().indices().prepareOptimize(index).setMaxNumSegments(between(1, 10)).setFlush(random.nextBoolean()).execute().get();
|
||||
client().admin().indices().prepareOptimize(index).setMaxNumSegments(between(1, 10)).setFlush(random.nextBoolean()).execute().get();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -395,7 +412,7 @@ public abstract class AbstractSharedClusterTest extends ElasticsearchTestCase {
|
|||
assertNoFailures(client().admin().indices().prepareRefresh(index).execute().get());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void clearScroll(String... scrollIds) {
|
||||
ClearScrollResponse clearResponse = client().prepareClearScroll()
|
||||
.setScrollIds(Arrays.asList(scrollIds)).get();
|
||||
|
|
|
@ -58,17 +58,17 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
public void simpleNested() throws Exception {
|
||||
XContentBuilder builder = jsonBuilder().
|
||||
startObject().
|
||||
field("type1").
|
||||
startObject().
|
||||
field("properties").
|
||||
startObject().
|
||||
field("nested1").
|
||||
startObject().
|
||||
field("type").
|
||||
value("nested").
|
||||
endObject().
|
||||
endObject().
|
||||
endObject().
|
||||
field("type1").
|
||||
startObject().
|
||||
field("properties").
|
||||
startObject().
|
||||
field("nested1").
|
||||
startObject().
|
||||
field("type").
|
||||
value("nested").
|
||||
endObject().
|
||||
endObject().
|
||||
endObject().
|
||||
endObject();
|
||||
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("type1", builder));
|
||||
ensureGreen();
|
||||
|
@ -227,7 +227,8 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
assertThat(statusResponse.getIndex("test").getDocs().getNumDocs(), equalTo(total * 3l));
|
||||
|
||||
client().prepareDeleteByQuery("test").setQuery(QueryBuilders.idsQuery("type1").ids(Integer.toString(docToDelete))).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
statusResponse = client().admin().indices().prepareStatus().execute().actionGet();
|
||||
assertThat(statusResponse.getIndex("test").getDocs().getNumDocs(), equalTo((total * 3l) - 3));
|
||||
|
||||
|
@ -273,12 +274,15 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
|
||||
IndicesStatusResponse statusResponse = client().admin().indices().prepareStatus().execute().actionGet();
|
||||
assertThat(statusResponse.getIndex("test").getDocs().getNumDocs(), equalTo(total));
|
||||
|
||||
client().prepareDeleteByQuery("test").setQuery(QueryBuilders.idsQuery("type1").ids(Integer.toString(docToDelete))).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
statusResponse = client().admin().indices().prepareStatus().execute().actionGet();
|
||||
assertThat(statusResponse.getIndex("test").getDocs().getNumDocs(), equalTo((total) - 1));
|
||||
|
||||
|
@ -408,8 +412,8 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
.addFacet(FacetBuilders.termsStatsFacet("facet1").keyField("nested1.nested2.field2_1").valueField("nested1.nested2.field2_2").nested("nested1.nested2"))
|
||||
.addFacet(FacetBuilders.statisticalFacet("facet2").field("field2_2").nested("nested1.nested2"))
|
||||
.addFacet(FacetBuilders.statisticalFacet("facet2_blue").field("field2_2").nested("nested1.nested2")
|
||||
.facetFilter(boolFilter().must(termFilter("field2_1", "blue"))))
|
||||
.execute().actionGet();
|
||||
.facetFilter(boolFilter().must(termFilter("field2_1", "blue"))))
|
||||
.execute().actionGet();
|
||||
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
|
||||
|
@ -539,12 +543,14 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
.endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
IndicesStatusResponse statusResponse = client().admin().indices().prepareStatus().execute().actionGet();
|
||||
assertThat(statusResponse.getIndex("test").getDocs().getNumDocs(), equalTo(6l));
|
||||
|
||||
client().prepareDeleteByQuery("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
statusResponse = client().admin().indices().prepareStatus().execute().actionGet();
|
||||
|
||||
// This must be 3, otherwise child docs aren't deleted.
|
||||
|
@ -605,7 +611,7 @@ public class SimpleNestedTests extends AbstractSharedClusterTest {
|
|||
.setSettings(settingsBuilder()
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 0)
|
||||
.put("index.referesh_interval", -1)
|
||||
.put("index.refresh_interval", -1)
|
||||
.build()
|
||||
)
|
||||
.addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.search.facet;
|
||||
|
||||
import org.apache.lucene.util.LuceneTestCase.Slow;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
|
@ -41,7 +42,6 @@ import org.elasticsearch.search.facet.terms.TermsFacet.Entry;
|
|||
import org.elasticsearch.search.facet.terms.doubles.InternalDoubleTermsFacet;
|
||||
import org.elasticsearch.search.facet.terms.longs.InternalLongTermsFacet;
|
||||
import org.elasticsearch.search.facet.termsstats.TermsStatsFacet;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
import org.junit.Test;
|
||||
|
@ -62,8 +62,8 @@ import static org.hamcrest.Matchers.*;
|
|||
*
|
||||
*/
|
||||
public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
||||
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public Settings getSettings() {
|
||||
return randomSettingsBuilder()
|
||||
|
@ -95,7 +95,8 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("tag", "green")
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("tag", "blue")
|
||||
|
@ -137,32 +138,32 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.startObject("float").field("type", "float").endObject()
|
||||
.startObject("double").field("type", "double").endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
.execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
client().prepareIndex("test", "type", ""+i).setSource(jsonBuilder().startObject()
|
||||
.field("name", ""+i)
|
||||
.field("multiValued", ""+i, "" + (90 + i%10))
|
||||
.field("byte", i )
|
||||
client().prepareIndex("test", "type", "" + i).setSource(jsonBuilder().startObject()
|
||||
.field("name", "" + i)
|
||||
.field("multiValued", "" + i, "" + (90 + i % 10))
|
||||
.field("byte", i)
|
||||
.field("short", i + Byte.MAX_VALUE)
|
||||
.field("integer", i + Short.MAX_VALUE)
|
||||
.field("long", i + Integer.MAX_VALUE)
|
||||
.field("float", (float)i)
|
||||
.field("double", (double)i)
|
||||
.field("float", (float) i)
|
||||
.field("double", (double) i)
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "type", ""+(i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", ""+i)
|
||||
client().prepareIndex("test", "type", "" + (i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", "" + i)
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
String[] execHint = new String[] {"map", null};
|
||||
String[] execHint = new String[]{"map", null};
|
||||
for (String hint : execHint) {
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("double").executionHint(hint).field("double").size(10))
|
||||
|
@ -268,161 +269,165 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
@Slow
|
||||
public void testConcurrentFacets() throws ElasticSearchException, IOException, InterruptedException, ExecutionException {
|
||||
prepareCreate("test")
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("byte").field("type", "byte").endObject()
|
||||
.startObject("short").field("type", "short").endObject()
|
||||
.startObject("integer").field("type", "integer").endObject()
|
||||
.startObject("long").field("type", "long").endObject()
|
||||
.startObject("float").field("type", "float").endObject()
|
||||
.startObject("double").field("type", "double").endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("byte").field("type", "byte").endObject()
|
||||
.startObject("short").field("type", "short").endObject()
|
||||
.startObject("integer").field("type", "integer").endObject()
|
||||
.startObject("long").field("type", "long").endObject()
|
||||
.startObject("float").field("type", "float").endObject()
|
||||
.startObject("double").field("type", "double").endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
client().prepareIndex("test", "type", ""+i).setSource(jsonBuilder().startObject()
|
||||
.field("name", ""+i)
|
||||
.field("byte", i )
|
||||
client().prepareIndex("test", "type", "" + i).setSource(jsonBuilder().startObject()
|
||||
.field("name", "" + i)
|
||||
.field("byte", i)
|
||||
.field("short", i + Byte.MAX_VALUE)
|
||||
.field("integer", i + Short.MAX_VALUE)
|
||||
.field("long", i + Integer.MAX_VALUE)
|
||||
.field("float", (float)i)
|
||||
.field("double", (double)i)
|
||||
.field("float", (float) i)
|
||||
.field("double", (double) i)
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "type", ""+(i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", ""+i)
|
||||
client().prepareIndex("test", "type", "" + (i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", "" + i)
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
ConcurrentDuel<Facets> duel = new ConcurrentDuel<Facets>(5);
|
||||
flushAndRefresh();
|
||||
ConcurrentDuel<Facets> duel = new ConcurrentDuel<Facets>(5);
|
||||
{
|
||||
final Client cl = client();
|
||||
|
||||
duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
|
||||
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), isIn(new String[] {"short", "double", "byte", "float", "integer", "long", "termFacet"}));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), isIn(new String[]{"short", "double", "byte", "float", "integer", "long", "termFacet"}));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
if (count.incrementAndGet() % 2 == 0) { // every second request is mapped
|
||||
facetRequest = cl.prepareSearch().setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("double").field("double").size(10))
|
||||
.addFacet(termsFacet("float").field("float").size(10))
|
||||
.addFacet(termsFacet("integer").field("integer").size(10))
|
||||
.addFacet(termsFacet("long").field("long").size(10))
|
||||
.addFacet(termsFacet("short").field("short").size(10))
|
||||
.addFacet(termsFacet("byte").field("byte").size(10))
|
||||
.addFacet(termsFacet("termFacet").field("name").size(10));
|
||||
} else {
|
||||
facetRequest = cl.prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("double").executionHint("map").field("double").size(10))
|
||||
.addFacet(termsFacet("float").executionHint("map").field("float").size(10))
|
||||
.addFacet(termsFacet("integer").executionHint("map").field("integer").size(10))
|
||||
.addFacet(termsFacet("long").executionHint("map").field("long").size(10))
|
||||
.addFacet(termsFacet("short").executionHint("map").field("short").size(10))
|
||||
.addFacet(termsFacet("byte").executionHint("map").field("byte").size(10))
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").size(10));
|
||||
}
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000);
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
if (count.incrementAndGet() % 2 == 0) { // every second request is mapped
|
||||
facetRequest = cl.prepareSearch().setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("double").field("double").size(10))
|
||||
.addFacet(termsFacet("float").field("float").size(10))
|
||||
.addFacet(termsFacet("integer").field("integer").size(10))
|
||||
.addFacet(termsFacet("long").field("long").size(10))
|
||||
.addFacet(termsFacet("short").field("short").size(10))
|
||||
.addFacet(termsFacet("byte").field("byte").size(10))
|
||||
.addFacet(termsFacet("termFacet").field("name").size(10));
|
||||
} else {
|
||||
facetRequest = cl.prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("double").executionHint("map").field("double").size(10))
|
||||
.addFacet(termsFacet("float").executionHint("map").field("float").size(10))
|
||||
.addFacet(termsFacet("integer").executionHint("map").field("integer").size(10))
|
||||
.addFacet(termsFacet("long").executionHint("map").field("long").size(10))
|
||||
.addFacet(termsFacet("short").executionHint("map").field("short").size(10))
|
||||
.addFacet(termsFacet("byte").executionHint("map").field("byte").size(10))
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").size(10));
|
||||
}
|
||||
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000
|
||||
);
|
||||
}
|
||||
{
|
||||
duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
|
||||
duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
|
||||
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), equalTo("termFacet"));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), equalTo("termFacet"));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
switch(count.incrementAndGet() % 6) {
|
||||
case 4:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").script("\"\" + (Integer.parseInt(term) % 100)").size(10));
|
||||
break;
|
||||
case 3:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").regex("\\d+").size(10));
|
||||
break;
|
||||
case 2:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 1:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 0:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").size(10));
|
||||
break;
|
||||
default:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").size(10));
|
||||
break;
|
||||
}
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000);
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
switch (count.incrementAndGet() % 6) {
|
||||
case 4:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").script("\"\" + (Integer.parseInt(term) % 100)").size(10));
|
||||
break;
|
||||
case 3:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").regex("\\d+").size(10));
|
||||
break;
|
||||
case 2:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 1:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 0:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field("name").size(10));
|
||||
break;
|
||||
default:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field("name").size(10));
|
||||
break;
|
||||
}
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000
|
||||
);
|
||||
}
|
||||
|
||||
duel.close();
|
||||
|
@ -432,148 +437,150 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
@Slow
|
||||
public void testDuelByteFieldDataImpl() throws ElasticSearchException, IOException, InterruptedException, ExecutionException {
|
||||
prepareCreate("test")
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("name_paged")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "paged_bytes").endObject()
|
||||
.endObject()
|
||||
.startObject("name_fst")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").endObject()
|
||||
.endObject()
|
||||
.startObject("name_paged_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "paged_bytes").endObject()
|
||||
.endObject()
|
||||
.startObject("name_fst_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").endObject()
|
||||
.endObject()
|
||||
.startObject("filtered")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").startObject("filter")
|
||||
.startObject("regex").field("pattern", "\\d{1,2}").endObject().endObject()
|
||||
.endObject()
|
||||
// only 1 or 2 digits
|
||||
.endObject()
|
||||
.startObject("filtered_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").startObject("filter")
|
||||
.startObject("regex").field("pattern", "\\d{1,2}").endObject().endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
|
||||
.startObject("name_paged")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "paged_bytes").endObject()
|
||||
.endObject()
|
||||
.startObject("name_fst")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").endObject()
|
||||
.endObject()
|
||||
.startObject("name_paged_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "paged_bytes").endObject()
|
||||
.endObject()
|
||||
.startObject("name_fst_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").endObject()
|
||||
.endObject()
|
||||
.startObject("filtered")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").startObject("filter")
|
||||
.startObject("regex").field("pattern", "\\d{1,2}").endObject().endObject()
|
||||
.endObject()
|
||||
// only 1 or 2 digits
|
||||
.endObject()
|
||||
.startObject("filtered_mv")
|
||||
.field("type", "string")
|
||||
.startObject("fielddata").field("format", "fst").startObject("filter")
|
||||
.startObject("regex").field("pattern", "\\d{1,2}").endObject().endObject()
|
||||
.endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
client().prepareIndex("test", "type", ""+i).setSource(jsonBuilder().startObject()
|
||||
.field("name_paged", ""+i)
|
||||
.field("name_fst", ""+i)
|
||||
.field("filtered", ""+i)
|
||||
.field("name_paged_mv", ""+i,""+ Math.min(99, i+1))
|
||||
.field("name_fst_mv", ""+i,""+Math.min(99, i+1))
|
||||
.field("filtered_mv", ""+i,""+Math.min(99, i+1), ""+(100 + i))
|
||||
client().prepareIndex("test", "type", "" + i).setSource(jsonBuilder().startObject()
|
||||
.field("name_paged", "" + i)
|
||||
.field("name_fst", "" + i)
|
||||
.field("filtered", "" + i)
|
||||
.field("name_paged_mv", "" + i, "" + Math.min(99, i + 1))
|
||||
.field("name_fst_mv", "" + i, "" + Math.min(99, i + 1))
|
||||
.field("filtered_mv", "" + i, "" + Math.min(99, i + 1), "" + (100 + i))
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "type", ""+(i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", ""+i)
|
||||
client().prepareIndex("test", "type", "" + (i + 100)).setSource(jsonBuilder().startObject()
|
||||
.field("foo", "" + i)
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
ConcurrentDuel<Facets> duel = new ConcurrentDuel<Facets>(5);
|
||||
String[] fieldPostFix = new String[] {"", "_mv"};
|
||||
for (final String postfix : fieldPostFix) {
|
||||
duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
|
||||
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), equalTo("termFacet"));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
|
||||
if ("_mv".equals(postfix)) {
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(199l));
|
||||
assertThat(facet.getOtherCount(), equalTo(179l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
} else {
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
}
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
int incrementAndGet = count.incrementAndGet();
|
||||
final String field;
|
||||
switch (incrementAndGet % 2) {
|
||||
case 1:
|
||||
field = "filtered"+postfix;
|
||||
break;
|
||||
case 0:
|
||||
field = "name_paged"+postfix;
|
||||
break;
|
||||
default:
|
||||
field = "name_fst"+postfix;
|
||||
}
|
||||
switch(incrementAndGet % 5) {
|
||||
case 4:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).script("\"\" + (Integer.parseInt(term) % 100)").size(10));
|
||||
break;
|
||||
case 3:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).regex("\\d+").size(10));
|
||||
break;
|
||||
case 2:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 1:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 0:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).size(10));
|
||||
break;
|
||||
default:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).size(10));
|
||||
break;
|
||||
}
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
flushAndRefresh();
|
||||
ConcurrentDuel<Facets> duel = new ConcurrentDuel<Facets>(5);
|
||||
String[] fieldPostFix = new String[]{"", "_mv"};
|
||||
for (final String postfix : fieldPostFix) {
|
||||
duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
|
||||
|
||||
@Override
|
||||
public void judge(Facets firstRun, Facets result) {
|
||||
for (Facet f : result) {
|
||||
TermsFacet facet = (TermsFacet) f;
|
||||
assertThat(facet.getName(), equalTo("termFacet"));
|
||||
TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
|
||||
assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
|
||||
|
||||
if ("_mv".equals(postfix)) {
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(199l));
|
||||
assertThat(facet.getOtherCount(), equalTo(179l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
} else {
|
||||
assertThat(facet.getEntries().size(), equalTo(10));
|
||||
assertThat(facet.getTotalCount(), equalTo(100l));
|
||||
assertThat(facet.getOtherCount(), equalTo(90l));
|
||||
assertThat(facet.getMissingCount(), equalTo(10l));
|
||||
}
|
||||
List<? extends Entry> right = facet.getEntries();
|
||||
List<? extends Entry> left = firstRunFacet.getEntries();
|
||||
|
||||
for (int i = 0; i < facet.getEntries().size(); i++) {
|
||||
assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
|
||||
assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}, new ConcurrentDuel.DuelExecutor<Facets>() {
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
|
||||
@Override
|
||||
public Facets run() {
|
||||
final SearchRequestBuilder facetRequest;
|
||||
int incrementAndGet = count.incrementAndGet();
|
||||
final String field;
|
||||
switch (incrementAndGet % 2) {
|
||||
case 1:
|
||||
field = "filtered" + postfix;
|
||||
break;
|
||||
case 0:
|
||||
field = "name_paged" + postfix;
|
||||
break;
|
||||
default:
|
||||
field = "name_fst" + postfix;
|
||||
}
|
||||
switch (incrementAndGet % 5) {
|
||||
case 4:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).script("\"\" + (Integer.parseInt(term) % 100)").size(10));
|
||||
break;
|
||||
case 3:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).regex("\\d+").size(10));
|
||||
break;
|
||||
case 2:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 1:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).regex("\\d+").script("term").size(10));
|
||||
break;
|
||||
case 0:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").field(field).size(10));
|
||||
break;
|
||||
default:
|
||||
facetRequest = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("termFacet").executionHint("map").field(field).size(10));
|
||||
break;
|
||||
}
|
||||
SearchResponse actionGet = facetRequest.execute().actionGet();
|
||||
return actionGet.getFacets();
|
||||
}
|
||||
}, 5000
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
duel.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSearchFilter() throws Exception {
|
||||
createIndex("test");
|
||||
|
@ -584,7 +591,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("tag", "green")
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("tag", "blue")
|
||||
|
@ -637,7 +644,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.startArray("tag").value("xxx").value("yyy").endArray()
|
||||
.startArray("ltag").value(1000l).value(2000l).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("stag", "111")
|
||||
|
@ -698,7 +705,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
client().prepareIndex("test2", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("stag", "111")
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
|
||||
for (int i = 0; i < numberOfRuns(); i++) {
|
||||
|
@ -735,7 +742,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("stag", "111")
|
||||
.startArray("tag").value("xxx").value("yyy").endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("stag", "111")
|
||||
|
@ -797,7 +804,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("kuku", "kuku")
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
for (int i = 0; i < numberOfRuns(); i++) {
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
|
@ -845,7 +852,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.startArray("ltag").value(1000l).value(2000l).endArray()
|
||||
.startArray("dtag").value(1000.1).value(2000.1).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("stag", "111")
|
||||
|
@ -1286,7 +1293,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("num", 1)
|
||||
.startArray("multi_num").value(1.0).value(2.0f).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("num", 2)
|
||||
|
@ -1388,7 +1395,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("num", 300)
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
for (int i = 0; i < numberOfRuns(); i++) {
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
|
@ -1432,7 +1439,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("date", "1970-01-01T00:00:00")
|
||||
.startArray("multi_num").value(13.0f).value(23.f).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("num", 1065)
|
||||
|
@ -1603,7 +1610,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.startArray("multi_num").value(13.0f).value(23.f).endArray()
|
||||
.startArray("multi_value").value(10).value(11).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("num", 1065)
|
||||
|
@ -1769,7 +1776,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("date", "2009-03-05T01:01:01")
|
||||
.field("num", 1)
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("date", "2009-03-05T04:01:01")
|
||||
|
@ -1883,7 +1890,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("date", "2009-03-05T23:31:01")
|
||||
.field("num", 1)
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject()
|
||||
.field("date", "2009-03-05T18:01:01")
|
||||
|
@ -1961,7 +1968,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("num", 500.0)
|
||||
.startArray("multi_num").value(5.0).value(6.0f).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
for (int i = 0; i < numberOfRuns(); i++) {
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
|
@ -2150,7 +2157,7 @@ public class SimpleFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("num", 500.0)
|
||||
.startArray("multi_num").value(5.0).value(6.0f).endArray()
|
||||
.endObject()).execute().actionGet();
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
for (int i = 0; i < numberOfRuns(); i++) {
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
|
|
|
@ -19,11 +19,11 @@
|
|||
|
||||
package org.elasticsearch.search.facet.terms;
|
||||
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -71,7 +71,7 @@ public class UnmappedFieldsTermsFacetsTests extends AbstractSharedClusterTest {
|
|||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("mapped").field("mapped").size(10))
|
||||
|
@ -198,7 +198,7 @@ public class UnmappedFieldsTermsFacetsTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("mapped_idx", "unmapped_idx")
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -307,7 +307,7 @@ public class UnmappedFieldsTermsFacetsTests extends AbstractSharedClusterTest {
|
|||
.field("foo", "bar")
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -353,7 +353,7 @@ public class UnmappedFieldsTermsFacetsTests extends AbstractSharedClusterTest {
|
|||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flushAndRefresh();
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addFacet(termsFacet("string").fields("mapped_str", "unmapped").size(10))
|
||||
|
|
|
@ -19,10 +19,10 @@
|
|||
|
||||
package org.elasticsearch.search.functionscore;
|
||||
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
|
@ -41,12 +41,12 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
|
||||
@Test
|
||||
public void consistentHitsWithSameSeed() throws Exception {
|
||||
final int replicas = between(0,2); // needed for green status!
|
||||
cluster().ensureAtLeastNumNodes(replicas+1);
|
||||
final int replicas = between(0, 2); // needed for green status!
|
||||
cluster().ensureAtLeastNumNodes(replicas + 1);
|
||||
assertAcked(client().admin().indices().prepareCreate("test")
|
||||
.setSettings(
|
||||
ImmutableSettings.builder().put("index.number_of_shards", between(2, 5))
|
||||
.put("index.number_of_replicas", replicas)
|
||||
.put("index.number_of_replicas", replicas)
|
||||
.build()));
|
||||
ensureGreen(); // make sure we are done otherwise preference could change?
|
||||
int docCount = atLeast(100);
|
||||
|
@ -54,6 +54,7 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
index("test", "type", "" + i, jsonBuilder().startObject().endObject());
|
||||
}
|
||||
flush();
|
||||
refresh();
|
||||
int outerIters = atLeast(10);
|
||||
for (int o = 0; o < outerIters; o++) {
|
||||
final long seed = randomLong();
|
||||
|
@ -81,7 +82,8 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
}
|
||||
|
||||
@Test @Ignore
|
||||
@Test
|
||||
@Ignore
|
||||
public void distribution() throws Exception {
|
||||
int count = 10000;
|
||||
|
||||
|
@ -93,6 +95,7 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
|
||||
flush();
|
||||
refresh();
|
||||
|
||||
int[] matrix = new int[count];
|
||||
|
||||
|
@ -118,9 +121,9 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
|
||||
System.out.println();
|
||||
System.out.println("max repeat: " + maxRepeat);
|
||||
System.out.println("avg repeat: " + sumRepeat / (double)filled);
|
||||
System.out.println("distribution: " + filled/(double)count);
|
||||
System.out.println("max repeat: " + maxRepeat);
|
||||
System.out.println("avg repeat: " + sumRepeat / (double) filled);
|
||||
System.out.println("distribution: " + filled / (double) count);
|
||||
|
||||
int percentile50 = filled / 2;
|
||||
int percentile25 = (filled / 4);
|
||||
|
@ -145,7 +148,7 @@ public class RandomScoreFunctionTests extends AbstractSharedClusterTest {
|
|||
percentile75--;
|
||||
}
|
||||
|
||||
System.out.println("mean: " + sum/(double)count);
|
||||
System.out.println("mean: " + sum / (double) count);
|
||||
|
||||
}
|
||||
|
||||
|
|
|
@ -20,9 +20,9 @@
|
|||
package org.elasticsearch.search.sort;
|
||||
|
||||
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util._TestUtil;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
|
@ -35,7 +35,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.FilterBuilders;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.AbstractSharedClusterTest;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -55,7 +54,7 @@ import static org.hamcrest.Matchers.*;
|
|||
*
|
||||
*/
|
||||
public class SimpleSortTests extends AbstractSharedClusterTest {
|
||||
|
||||
|
||||
@Override
|
||||
public Settings getSettings() {
|
||||
return randomSettingsBuilder()
|
||||
|
@ -63,7 +62,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.put("index.number_of_replicas", 0)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testTrackScores() throws Exception {
|
||||
createIndex("test");
|
||||
|
@ -104,7 +103,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(hit.getScore(), not(equalTo(Float.NaN)));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void testRandomSorting() throws ElasticSearchException, IOException, InterruptedException, ExecutionException {
|
||||
int numberOfShards = between(1, 10);
|
||||
Random random = getRandom();
|
||||
|
@ -112,20 +111,20 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.setSettings(randomSettingsBuilder().put("index.number_of_shards", numberOfShards).put("index.number_of_replicas", 0))
|
||||
.addMapping("type",
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject()
|
||||
.startObject("type")
|
||||
.startObject("properties")
|
||||
.startObject("sparse_bytes")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.startObject("dense_bytes")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.startObject("properties")
|
||||
.startObject("sparse_bytes")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
.startObject("dense_bytes")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
ensureGreen();
|
||||
|
||||
TreeMap<BytesRef, String> sparseBytes = new TreeMap<BytesRef, String>();
|
||||
|
@ -182,7 +181,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void test3078() {
|
||||
|
@ -197,7 +196,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("1"));
|
||||
assertThat(searchResponse.getHits().getAt(1).sortValues()[0].toString(), equalTo("10"));
|
||||
assertThat(searchResponse.getHits().getAt(2).sortValues()[0].toString(), equalTo("100"));
|
||||
|
||||
|
||||
// reindex and refresh
|
||||
client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).execute().actionGet();
|
||||
refresh();
|
||||
|
@ -206,7 +205,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("1"));
|
||||
assertThat(searchResponse.getHits().getAt(1).sortValues()[0].toString(), equalTo("10"));
|
||||
assertThat(searchResponse.getHits().getAt(2).sortValues()[0].toString(), equalTo("100"));
|
||||
|
||||
|
||||
// reindex - no refresh
|
||||
client().prepareIndex("test", "type", Integer.toString(1)).setSource("field", Integer.toString(1)).execute().actionGet();
|
||||
|
||||
|
@ -214,7 +213,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("1"));
|
||||
assertThat(searchResponse.getHits().getAt(1).sortValues()[0].toString(), equalTo("10"));
|
||||
assertThat(searchResponse.getHits().getAt(2).sortValues()[0].toString(), equalTo("100"));
|
||||
|
||||
|
||||
// optimize
|
||||
optimize();
|
||||
refresh();
|
||||
|
@ -224,7 +223,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("1"));
|
||||
assertThat(searchResponse.getHits().getAt(1).sortValues()[0].toString(), equalTo("10"));
|
||||
assertThat(searchResponse.getHits().getAt(2).sortValues()[0].toString(), equalTo("100"));
|
||||
|
||||
|
||||
refresh();
|
||||
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)).execute().actionGet();
|
||||
assertThat(searchResponse.getHits().getAt(0).sortValues()[0].toString(), equalTo("1"));
|
||||
|
@ -263,7 +262,6 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1"));
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testScoreSortDirection_withFunctionScore() throws Exception {
|
||||
|
@ -328,17 +326,17 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
refresh();
|
||||
|
||||
client().prepareIndex("test", "type", "4").setSource("tag", "delta").execute().actionGet();
|
||||
|
||||
|
||||
refresh();
|
||||
client().prepareIndex("test", "type", "2").setSource("tag", "beta").execute().actionGet();
|
||||
|
||||
|
||||
refresh();
|
||||
SearchResponse resp = client().prepareSearch("test").setSize(2).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("tag").order(SortOrder.ASC)).execute().actionGet();
|
||||
assertHitCount(resp, 4);
|
||||
assertThat(resp.getHits().hits().length, equalTo(2));
|
||||
assertFirstHit(resp, hasId("1"));
|
||||
assertSecondHit(resp, hasId("2"));
|
||||
|
||||
|
||||
resp = client().prepareSearch("test").setSize(2).setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("tag").order(SortOrder.DESC)).execute().actionGet();
|
||||
assertHitCount(resp, 4);
|
||||
assertThat(resp.getHits().hits().length, equalTo(2));
|
||||
|
@ -386,10 +384,10 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
if (random.nextInt(5) != 0) {
|
||||
refresh();
|
||||
} else {
|
||||
client().admin().indices().prepareFlush().execute().actionGet();
|
||||
client().admin().indices().prepareFlush().execute().actionGet();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
refresh();
|
||||
|
||||
|
@ -422,8 +420,8 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
}
|
||||
|
||||
assertThat(searchResponse.toString(), not(containsString("error")));
|
||||
|
||||
|
||||
|
||||
|
||||
// STRING script
|
||||
size = 1 + random.nextInt(10);
|
||||
|
||||
|
@ -642,13 +640,13 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertNoFailures(searchResponse);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
||||
@Test
|
||||
public void test2920() throws IOException {
|
||||
assertAcked(prepareCreate("test").addMapping("test",
|
||||
jsonBuilder().startObject().startObject("test").startObject("properties")
|
||||
.startObject("value").field("type", "string").endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
assertAcked(prepareCreate("test").addMapping("test",
|
||||
jsonBuilder().startObject().startObject("test").startObject("properties")
|
||||
.startObject("value").field("type", "string").endObject()
|
||||
.endObject().endObject().endObject()));
|
||||
ensureGreen();
|
||||
for (int i = 0; i < 10; i++) {
|
||||
client().prepareIndex("test", "test", Integer.toString(i)).setSource(jsonBuilder().startObject()
|
||||
|
@ -661,7 +659,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.execute().actionGet();
|
||||
assertNoFailures(searchResponse);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSortMinValueScript() throws IOException {
|
||||
String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
|
@ -674,28 +672,28 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
ensureGreen();
|
||||
|
||||
for (int i = 0; i < 10; i++) {
|
||||
IndexRequestBuilder req = client().prepareIndex("test", "type1", ""+i).setSource(jsonBuilder().startObject()
|
||||
IndexRequestBuilder req = client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject()
|
||||
.field("ord", i)
|
||||
.field("svalue", new String[]{""+i, ""+(i+1), ""+(i+2)})
|
||||
.field("lvalue", new long[] {i, i+1, i+2})
|
||||
.field("dvalue", new double[] {i, i+1, i+2})
|
||||
.field("svalue", new String[]{"" + i, "" + (i + 1), "" + (i + 2)})
|
||||
.field("lvalue", new long[]{i, i + 1, i + 2})
|
||||
.field("dvalue", new double[]{i, i + 1, i + 2})
|
||||
.startObject("gvalue")
|
||||
.startObject("location")
|
||||
.field("lat", (double)i+1)
|
||||
.field("lon", (double)i)
|
||||
.endObject()
|
||||
.startObject("location")
|
||||
.field("lat", (double) i + 1)
|
||||
.field("lon", (double) i)
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject());
|
||||
req.execute().actionGet();
|
||||
}
|
||||
|
||||
|
||||
for (int i = 10; i < 20; i++) { // add some docs that don't have values in those fields
|
||||
client().prepareIndex("test", "type1", ""+i).setSource(jsonBuilder().startObject()
|
||||
client().prepareIndex("test", "type1", "" + i).setSource(jsonBuilder().startObject()
|
||||
.field("ord", i)
|
||||
.endObject()).execute().actionGet();
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
client().admin().indices().prepareRefresh("test").execute().actionGet();
|
||||
|
||||
|
||||
// test the long values
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -707,7 +705,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20l));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Long)searchResponse.getHits().getAt(i).field("min").value(), equalTo((long)i));
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Long) searchResponse.getHits().getAt(i).field("min").value(), equalTo((long) i));
|
||||
}
|
||||
// test the double values
|
||||
searchResponse = client().prepareSearch()
|
||||
|
@ -720,9 +718,9 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20l));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double)searchResponse.getHits().getAt(i).field("min").value(), equalTo((double)i));
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), equalTo((double) i));
|
||||
}
|
||||
|
||||
|
||||
// test the string values
|
||||
searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -734,9 +732,9 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20l));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Integer)searchResponse.getHits().getAt(i).field("min").value(), equalTo(i));
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Integer) searchResponse.getHits().getAt(i).field("min").value(), equalTo(i));
|
||||
}
|
||||
|
||||
|
||||
// test the geopoint values
|
||||
searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -748,7 +746,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20l));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double)searchResponse.getHits().getAt(i).field("min").value(), equalTo((double)i));
|
||||
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), equalTo((double) i));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -779,7 +777,8 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.endObject()).execute().actionGet();
|
||||
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -793,7 +792,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat((String) searchResponse.getHits().getAt(0).field("id").value(), equalTo("1"));
|
||||
assertThat((String) searchResponse.getHits().getAt(1).field("id").value(), equalTo("3"));
|
||||
assertThat((String) searchResponse.getHits().getAt(2).field("id").value(), equalTo("2"));
|
||||
|
||||
|
||||
searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addScriptField("id", "doc['id'].values[0]")
|
||||
|
@ -849,18 +848,18 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
public void testSortMissingNumbers() throws Exception {
|
||||
prepareCreate("test").addMapping("type1",
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("i_value")
|
||||
.field("type", "integer")
|
||||
.endObject()
|
||||
.startObject("d_value")
|
||||
.field("type", "float")
|
||||
.endObject()
|
||||
.startObject("i_value")
|
||||
.field("type", "integer")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
.startObject("d_value")
|
||||
.field("type", "float")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
|
||||
.field("id", "1")
|
||||
|
@ -878,7 +877,8 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.field("d_value", 2.2)
|
||||
.endObject()).execute().actionGet();
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
|
||||
flush();
|
||||
refresh();
|
||||
|
||||
logger.info("--> sort with no missing (same as missing _last)");
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
|
@ -921,16 +921,16 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
public void testSortMissingStrings() throws ElasticSearchException, IOException {
|
||||
prepareCreate("test").addMapping("type1",
|
||||
XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject()
|
||||
.startObject("type1")
|
||||
.startObject("properties")
|
||||
.startObject("value")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.startObject("value")
|
||||
.field("type", "string")
|
||||
.field("index", "not_analyzed")
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
.endObject()
|
||||
.endObject()
|
||||
.endObject()).execute().actionGet();
|
||||
ensureGreen();
|
||||
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
|
||||
.field("id", "1")
|
||||
|
@ -945,8 +945,11 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.field("id", "1")
|
||||
.field("value", "c")
|
||||
.endObject()).execute().actionGet();
|
||||
|
||||
client().admin().indices().prepareFlush().setRefresh(true).execute().actionGet();try {
|
||||
|
||||
flush();
|
||||
refresh();
|
||||
|
||||
try {
|
||||
Thread.sleep(2000);
|
||||
} catch (InterruptedException e) {
|
||||
throw new RuntimeException();
|
||||
|
@ -1014,11 +1017,11 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
logger.info("--> sort with an unmapped field, verify it fails");
|
||||
try {
|
||||
SearchResponse result = client().prepareSearch()
|
||||
SearchResponse result = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.addSort(SortBuilders.fieldSort("kkk"))
|
||||
.execute().actionGet();
|
||||
assertThat("Expected exception but returned with", result, nullValue());
|
||||
assertThat("Expected exception but returned with", result, nullValue());
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
//we check that it's a parse failure rather than a different shard failure
|
||||
for (ShardSearchFailure shardSearchFailure : e.shardFailures()) {
|
||||
|
@ -1350,7 +1353,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(2).id(), equalTo(Integer.toString(3)));
|
||||
assertThat(((Text) searchResponse.getHits().getAt(2).sortValues()[0]).string(), equalTo("03"));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSortOnRareField() throws ElasticSearchException, IOException {
|
||||
prepareCreate("test")
|
||||
|
@ -1364,9 +1367,7 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
.array("string_values", "01", "05", "10", "08")
|
||||
.endObject()).execute().actionGet();
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
refresh();
|
||||
SearchResponse searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -1379,12 +1380,12 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getAt(0).id(), equalTo(Integer.toString(1)));
|
||||
assertThat(((Text) searchResponse.getHits().getAt(0).sortValues()[0]).string(), equalTo("10"));
|
||||
|
||||
|
||||
client().prepareIndex("test", "type1", Integer.toString(2)).setSource(jsonBuilder().startObject()
|
||||
.array("string_values", "11", "15", "20", "07")
|
||||
.endObject()).execute().actionGet();
|
||||
for (int i = 0; i < 15; i++) {
|
||||
client().prepareIndex("test", "type1", Integer.toString(300+i)).setSource(jsonBuilder().startObject()
|
||||
client().prepareIndex("test", "type1", Integer.toString(300 + i)).setSource(jsonBuilder().startObject()
|
||||
.array("some_other_field", "foobar")
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
@ -1404,12 +1405,12 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
assertThat(searchResponse.getHits().getAt(1).id(), equalTo(Integer.toString(1)));
|
||||
assertThat(((Text) searchResponse.getHits().getAt(1).sortValues()[0]).string(), equalTo("10"));
|
||||
|
||||
|
||||
|
||||
client().prepareIndex("test", "type1", Integer.toString(3)).setSource(jsonBuilder().startObject()
|
||||
.array("string_values", "02", "01", "03", "!4")
|
||||
.endObject()).execute().actionGet();
|
||||
for (int i = 0; i < 15; i++) {
|
||||
client().prepareIndex("test", "type1", Integer.toString(300+i)).setSource(jsonBuilder().startObject()
|
||||
client().prepareIndex("test", "type1", Integer.toString(300 + i)).setSource(jsonBuilder().startObject()
|
||||
.array("some_other_field", "foobar")
|
||||
.endObject()).execute().actionGet();
|
||||
}
|
||||
|
@ -1431,14 +1432,14 @@ public class SimpleSortTests extends AbstractSharedClusterTest {
|
|||
|
||||
assertThat(searchResponse.getHits().getAt(2).id(), equalTo(Integer.toString(3)));
|
||||
assertThat(((Text) searchResponse.getHits().getAt(2).sortValues()[0]).string(), equalTo("03"));
|
||||
|
||||
|
||||
for (int i = 0; i < 15; i++) {
|
||||
client().prepareIndex("test", "type1", Integer.toString(300+i)).setSource(jsonBuilder().startObject()
|
||||
client().prepareIndex("test", "type1", Integer.toString(300 + i)).setSource(jsonBuilder().startObject()
|
||||
.array("some_other_field", "foobar")
|
||||
.endObject()).execute().actionGet();
|
||||
refresh();
|
||||
}
|
||||
|
||||
|
||||
searchResponse = client().prepareSearch()
|
||||
.setQuery(matchAllQuery())
|
||||
.setSize(3)
|
||||
|
|
Loading…
Reference in New Issue