[ML] Respect max_result_window setting in result APIs (elastic/x-pack-elasticsearch#4380)
Currently there is a hardcoded check against 10000, which is the default value of the max_result_window index setting. This is a relic of the past. Removing this hardcoded validation means we respect the setting so that a user may alter it when appropriate.

Relates elastic/x-pack-elasticsearch#3672
Original commit: elastic/x-pack-elasticsearch@9c9c5bab89
This commit is contained in: parent 1701934dd4, commit 5bd467eec8
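With the hardcoded cap removed, paging depth in the ML result APIs is bounded only by the index-level index.max_result_window setting. The sketch below is illustrative only and is not part of this commit: it assumes a 6.x transport-style Client, and the index name and window value are hypothetical placeholders.

// Illustrative sketch only (not part of this commit): shows the effect of removing the
// hardcoded 10,000 cap. The index name and window value below are hypothetical.
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.ml.action.util.PageParams;

class DeepPagingSketch {

    // Raise index.max_result_window on a results index so that from + size sums
    // above 10,000 are accepted by the search layer.
    static void raiseResultWindow(Client client, String resultsIndex) {
        client.admin().indices().prepareUpdateSettings(resultsIndex)
                .setSettings(Settings.builder().put("index.max_result_window", 20000).build())
                .get();
    }

    // Before this change, PageParams validated from + size against a hardcoded 10,000
    // and new PageParams(15000, 100) threw; now only negative values are rejected here.
    static PageParams deepPage() {
        return new PageParams(15000, 100);
    }
}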
@@ -28,8 +28,6 @@ public class PageParams implements ToXContentObject, Writeable {
     public static final ConstructingObjectParser<PageParams, Void> PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(),
             a -> new PageParams(a[0] == null ? DEFAULT_FROM : (int) a[0], a[1] == null ? DEFAULT_SIZE : (int) a[1]));
 
-    public static final int MAX_FROM_SIZE_SUM = 10000;
-
     static {
         PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), FROM);
         PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), SIZE);
@@ -64,10 +62,6 @@ public class PageParams implements ToXContentObject, Writeable {
         if (size < 0) {
             throw new IllegalArgumentException("Parameter [" + PageParams.SIZE.getPreferredName() + "] cannot be < 0");
         }
-        if (from + size > MAX_FROM_SIZE_SUM) {
-            throw new IllegalArgumentException("The sum of parameters [" + FROM.getPreferredName() + "] and ["
-                    + PageParams.SIZE.getPreferredName() + "] cannot be higher than " + MAX_FROM_SIZE_SUM + ".");
-        }
         this.from = from;
         this.size = size;
     }
@@ -54,6 +54,9 @@ import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
  refactoring to move it back to core. See DeleteJobAction for its use.
  */
 public class JobStorageDeletionTask extends Task {
+
+    private static final int MAX_SNAPSHOTS_TO_DELETE = 10000;
+
     private final Logger logger;
 
     public JobStorageDeletionTask(long id, String type, String action, String description, TaskId parentTask, Map<String, String> headers) {
@@ -147,7 +150,7 @@ public class JobStorageDeletionTask extends Task {
 
     private void deleteModelState(String jobId, Client client, ActionListener<BulkResponse> listener) {
         GetModelSnapshotsAction.Request request = new GetModelSnapshotsAction.Request(jobId, null);
-        request.setPageParams(new PageParams(0, PageParams.MAX_FROM_SIZE_SUM));
+        request.setPageParams(new PageParams(0, MAX_SNAPSHOTS_TO_DELETE));
         executeAsyncWithOrigin(client, ML_ORIGIN, GetModelSnapshotsAction.INSTANCE, request, ActionListener.wrap(
             response -> {
                 List<ModelSnapshot> deleteCandidates = response.getPage().results();
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action;
 
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractStreamableXContentTestCase;
-import org.elasticsearch.xpack.core.ml.action.GetBucketsAction;
 import org.elasticsearch.xpack.core.ml.action.GetBucketsAction.Request;
 import org.elasticsearch.xpack.core.ml.action.util.PageParams;
 
@@ -33,9 +32,8 @@ public class GetBucketActionRequestTests extends AbstractStreamableXContentTestC
             request.setAnomalyScore(randomDouble());
         }
         if (randomBoolean()) {
-            int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-            int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-            int size = randomInt(maxSize);
+            int from = randomInt(10000);
+            int size = randomInt(10000);
             request.setPageParams(new PageParams(from, size));
         }
         if (randomBoolean()) {
@@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.ml.action;
 
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractStreamableXContentTestCase;
-import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction;
 import org.elasticsearch.xpack.core.ml.action.util.PageParams;
 
 public class GetCategoriesRequestTests extends AbstractStreamableXContentTestCase<GetCategoriesAction.Request> {
@@ -19,9 +18,8 @@ public class GetCategoriesRequestTests extends AbstractStreamableXContentTestCas
         if (randomBoolean()) {
             request.setCategoryId(randomNonNegativeLong());
         } else {
-            int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-            int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-            int size = randomInt(maxSize);
+            int from = randomInt(10000);
+            int size = randomInt(10000);
             request.setPageParams(new PageParams(from, size));
         }
         return request;
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.core.ml.action;
 
 import org.elasticsearch.test.AbstractStreamableTestCase;
-import org.elasticsearch.xpack.core.ml.action.GetFiltersAction;
 import org.elasticsearch.xpack.core.ml.action.GetFiltersAction.Request;
 import org.elasticsearch.xpack.core.ml.action.util.PageParams;
 
@@ -20,9 +19,8 @@ public class GetFiltersActionRequestTests extends AbstractStreamableTestCase<Get
             request.setFilterId(randomAlphaOfLengthBetween(1, 20));
         } else {
             if (randomBoolean()) {
-                int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-                int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-                int size = randomInt(maxSize);
+                int from = randomInt(10000);
+                int size = randomInt(10000);
                 request.setPageParams(new PageParams(from, size));
             }
         }
@@ -41,9 +41,8 @@ public class GetInfluencersActionRequestTests extends AbstractStreamableXContent
             request.setDescending(randomBoolean());
         }
         if (randomBoolean()) {
-            int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-            int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-            int size = randomInt(maxSize);
+            int from = randomInt(10000);
+            int size = randomInt(10000);
             request.setPageParams(new PageParams(from, size));
         }
         return request;
@@ -33,9 +33,8 @@ public class GetModelSnapshotsActionRequestTests extends AbstractStreamableXCont
             request.setDescOrder(randomBoolean());
         }
         if (randomBoolean()) {
-            int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-            int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-            int size = randomInt(maxSize);
+            int from = randomInt(10000);
+            int size = randomInt(10000);
             request.setPageParams(new PageParams(from, size));
         }
         return request;
@@ -41,9 +41,8 @@ public class GetRecordsActionRequestTests extends AbstractStreamableXContentTest
             request.setExcludeInterim(randomBoolean());
         }
         if (randomBoolean()) {
-            int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-            int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-            int size = randomInt(maxSize);
+            int from = randomInt(10000);
+            int size = randomInt(10000);
             request.setPageParams(new PageParams(from, size));
         }
         return request;
@@ -9,8 +9,6 @@ import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 
-import java.io.IOException;
-
 public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
 
     @Override
@@ -20,9 +18,8 @@ public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
 
     @Override
     protected PageParams createTestInstance() {
-        int from = randomInt(PageParams.MAX_FROM_SIZE_SUM);
-        int maxSize = PageParams.MAX_FROM_SIZE_SUM - from;
-        int size = randomInt(maxSize);
+        int from = randomInt(10000);
+        int size = randomInt(10000);
         return new PageParams(from, size);
     }
 
@@ -51,44 +48,17 @@ public class PageParamsTests extends AbstractSerializingTestCase<PageParams> {
         assertEquals("Parameter [size] cannot be < 0", e.getMessage());
     }
 
-    public void testValidate_GivenFromAndSizeSumIsMoreThan10000() {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new PageParams(0, 10001));
-        assertEquals("The sum of parameters [from] and [size] cannot be higher than 10000.", e.getMessage());
-    }
-
     @Override
-    protected PageParams mutateInstance(PageParams instance) throws IOException {
+    protected PageParams mutateInstance(PageParams instance) {
         int from = instance.getFrom();
         int size = instance.getSize();
         int amountToAdd = between(1, 20);
         switch (between(0, 1)) {
         case 0:
             from += amountToAdd;
-            // If we have gone above the limit for max and size then we need to
-            // adjust from and size to be valid
-            if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
-                if (from >= 2 * amountToAdd) {
-                    // If from is large enough then just subtract the amount we added twice
-                    from -= 2 * amountToAdd;
-                } else {
-                    // Otherwise change size to obey the limit
-                    size = PageParams.MAX_FROM_SIZE_SUM - from;
-                }
-            }
             break;
         case 1:
             size += amountToAdd;
-            // If we have gone above the limit for max and size then we need to
-            // adjust from and size to be valid
-            if ((from + size) > PageParams.MAX_FROM_SIZE_SUM) {
-                if (size >= 2 * amountToAdd) {
-                    // If from is large enough then just subtract the amount we added twice
-                    size -= 2 * amountToAdd;
-                } else {
-                    // Otherwise change size to obey the limit
-                    from = PageParams.MAX_FROM_SIZE_SUM - size;
-                }
-            }
             break;
         default:
             throw new AssertionError("Illegal randomisation branch");