Merge branch 'master' into index-lifecycle

Commit: d1479dbaed

@@ -1,5 +1,5 @@
 elasticsearch = 7.0.0-alpha1
-lucene = 7.4.0-snapshot-59f2b7aec2
+lucene = 7.4.0-snapshot-cc2ee23050

 # optional dependencies
 spatial4j = 0.7

@ -26,8 +26,6 @@ import org.elasticsearch.ElasticsearchStatusException;
|
|||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
|
@ -592,7 +590,7 @@ public class RestHighLevelClient implements Closeable {
|
|||
throw validationException;
|
||||
}
|
||||
Request req = requestConverter.apply(request);
|
||||
req.setHeaders(headers);
|
||||
addHeaders(req, headers);
|
||||
Response response;
|
||||
try {
|
||||
response = client.performRequest(req);
|
||||
|
@ -642,12 +640,19 @@ public class RestHighLevelClient implements Closeable {
|
|||
listener.onFailure(e);
|
||||
return;
|
||||
}
|
||||
req.setHeaders(headers);
|
||||
addHeaders(req, headers);
|
||||
|
||||
ResponseListener responseListener = wrapResponseListener(responseConverter, listener, ignores);
|
||||
client.performRequestAsync(req, responseListener);
|
||||
}
|
||||
|
||||
private static void addHeaders(Request request, Header... headers) {
|
||||
Objects.requireNonNull(headers, "headers cannot be null");
|
||||
for (Header header : headers) {
|
||||
request.addHeader(header.getName(), header.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
final <Resp> ResponseListener wrapResponseListener(CheckedFunction<Response, Resp, IOException> responseConverter,
|
||||
ActionListener<Resp> actionListener, Set<Integer> ignores) {
|
||||
return new ResponseListener() {
|
||||
|
|
|
@ -73,12 +73,12 @@ public class CustomRestHighLevelClientTests extends ESTestCase {
|
|||
final RestClient restClient = mock(RestClient.class);
|
||||
restHighLevelClient = new CustomRestClient(restClient);
|
||||
|
||||
doAnswer(inv -> mockPerformRequest(((Request) inv.getArguments()[0]).getHeaders()[0]))
|
||||
doAnswer(inv -> mockPerformRequest(((Request) inv.getArguments()[0]).getHeaders().iterator().next()))
|
||||
.when(restClient)
|
||||
.performRequest(any(Request.class));
|
||||
|
||||
doAnswer(inv -> mockPerformRequestAsync(
|
||||
((Request) inv.getArguments()[0]).getHeaders()[0],
|
||||
((Request) inv.getArguments()[0]).getHeaders().iterator().next(),
|
||||
(ResponseListener) inv.getArguments()[1]))
|
||||
.when(restClient)
|
||||
.performRequestAsync(any(Request.class), any(ResponseListener.class));
|
||||
|
|
|
@ -19,14 +19,17 @@
|
|||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.Header;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.message.BasicHeader;
|
||||
import org.apache.http.nio.entity.NStringEntity;
|
||||
import org.apache.http.nio.protocol.HttpAsyncResponseConsumer;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
|
@@ -36,13 +39,12 @@ import static java.util.Collections.unmodifiableMap;
  * HTTP Request to Elasticsearch.
  */
 public final class Request {
-    private static final Header[] NO_HEADERS = new Header[0];
     private final String method;
     private final String endpoint;
     private final Map<String, String> parameters = new HashMap<>();
+    private final List<Header> headers = new ArrayList<>();

     private HttpEntity entity;
-    private Header[] headers = NO_HEADERS;
     private HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory =
             HttpAsyncResponseConsumerFactory.DEFAULT;

@@ -125,21 +127,19 @@ public final class Request {
     }

     /**
-     * Set the headers to attach to the request.
+     * Add the provided header to the request.
      */
-    public void setHeaders(Header... headers) {
-        Objects.requireNonNull(headers, "headers cannot be null");
-        for (Header header : headers) {
-            Objects.requireNonNull(header, "header cannot be null");
-        }
-        this.headers = headers;
+    public void addHeader(String name, String value) {
+        Objects.requireNonNull(name, "header name cannot be null");
+        Objects.requireNonNull(value, "header value cannot be null");
+        this.headers.add(new ReqHeader(name, value));
     }

     /**
      * Headers to attach to the request.
      */
-    public Header[] getHeaders() {
-        return headers;
+    List<Header> getHeaders() {
+        return Collections.unmodifiableList(headers);
     }

     /**
@ -175,13 +175,13 @@ public final class Request {
|
|||
if (entity != null) {
|
||||
b.append(", entity=").append(entity);
|
||||
}
|
||||
if (headers.length > 0) {
|
||||
if (headers.size() > 0) {
|
||||
b.append(", headers=");
|
||||
for (int h = 0; h < headers.length; h++) {
|
||||
for (int h = 0; h < headers.size(); h++) {
|
||||
if (h != 0) {
|
||||
b.append(',');
|
||||
}
|
||||
b.append(headers[h].toString());
|
||||
b.append(headers.get(h).toString());
|
||||
}
|
||||
}
|
||||
if (httpAsyncResponseConsumerFactory != HttpAsyncResponseConsumerFactory.DEFAULT) {
|
||||
|
@ -204,12 +204,40 @@ public final class Request {
|
|||
&& endpoint.equals(other.endpoint)
|
||||
&& parameters.equals(other.parameters)
|
||||
&& Objects.equals(entity, other.entity)
|
||||
&& Arrays.equals(headers, other.headers)
|
||||
&& headers.equals(other.headers)
|
||||
&& httpAsyncResponseConsumerFactory.equals(other.httpAsyncResponseConsumerFactory);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(method, endpoint, parameters, entity, Arrays.hashCode(headers), httpAsyncResponseConsumerFactory);
|
||||
return Objects.hash(method, endpoint, parameters, entity, headers.hashCode(), httpAsyncResponseConsumerFactory);
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom implementation of {@link BasicHeader} that overrides equals and hashCode.
|
||||
*/
|
||||
static final class ReqHeader extends BasicHeader {
|
||||
|
||||
ReqHeader(String name, String value) {
|
||||
super(name, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other instanceof ReqHeader) {
|
||||
Header otherHeader = (Header) other;
|
||||
return Objects.equals(getName(), otherHeader.getName()) &&
|
||||
Objects.equals(getValue(), otherHeader.getValue());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(getName(), getValue());
|
||||
}
|
||||
}
|
||||
}
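
A minimal usage sketch of the new header API shown above. The endpoint and header values here are hypothetical, chosen only for illustration; only the `Request` constructor and `addHeader(String, String)` come from this commit.

[source,java]
--------------------------------------------------
import org.elasticsearch.client.Request;

public class RequestHeaderExample {
    public static Request buildRequest() {
        // hypothetical endpoint and header values, for illustration only
        Request request = new Request("GET", "/_cluster/health");
        // setHeaders(Header...) is replaced by addHeader(String, String),
        // which attaches one header at a time
        request.addHeader("Accept", "application/json");
        request.addHeader("Cache-Control", "no-cache");
        // getHeaders() is now package-private and returns an unmodifiable
        // List<Header>, so callers can no longer mutate or replace the headers
        return request;
    }
}
--------------------------------------------------
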
|
||||
|
|
|
@ -215,7 +215,7 @@ public class RestClient implements Closeable {
|
|||
@Deprecated
|
||||
public Response performRequest(String method, String endpoint, Header... headers) throws IOException {
|
||||
Request request = new Request(method, endpoint);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
return performRequest(request);
|
||||
}
|
||||
|
||||
|
@ -237,7 +237,7 @@ public class RestClient implements Closeable {
|
|||
public Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
|
||||
Request request = new Request(method, endpoint);
|
||||
addParameters(request, params);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
return performRequest(request);
|
||||
}
|
||||
|
||||
|
@ -264,7 +264,7 @@ public class RestClient implements Closeable {
|
|||
Request request = new Request(method, endpoint);
|
||||
addParameters(request, params);
|
||||
request.setEntity(entity);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
return performRequest(request);
|
||||
}
|
||||
|
||||
|
@ -305,7 +305,7 @@ public class RestClient implements Closeable {
|
|||
addParameters(request, params);
|
||||
request.setEntity(entity);
|
||||
request.setHttpAsyncResponseConsumerFactory(httpAsyncResponseConsumerFactory);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
return performRequest(request);
|
||||
}
|
||||
|
||||
|
@ -325,7 +325,7 @@ public class RestClient implements Closeable {
|
|||
Request request;
|
||||
try {
|
||||
request = new Request(method, endpoint);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
} catch (Exception e) {
|
||||
responseListener.onFailure(e);
|
||||
return;
|
||||
|
@ -352,7 +352,7 @@ public class RestClient implements Closeable {
|
|||
try {
|
||||
request = new Request(method, endpoint);
|
||||
addParameters(request, params);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
} catch (Exception e) {
|
||||
responseListener.onFailure(e);
|
||||
return;
|
||||
|
@ -383,7 +383,7 @@ public class RestClient implements Closeable {
|
|||
request = new Request(method, endpoint);
|
||||
addParameters(request, params);
|
||||
request.setEntity(entity);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
} catch (Exception e) {
|
||||
responseListener.onFailure(e);
|
||||
return;
|
||||
|
@ -420,7 +420,7 @@ public class RestClient implements Closeable {
|
|||
addParameters(request, params);
|
||||
request.setEntity(entity);
|
||||
request.setHttpAsyncResponseConsumerFactory(httpAsyncResponseConsumerFactory);
|
||||
request.setHeaders(headers);
|
||||
addHeaders(request, headers);
|
||||
} catch (Exception e) {
|
||||
responseListener.onFailure(e);
|
||||
return;
|
||||
|
@ -539,9 +539,9 @@ public class RestClient implements Closeable {
|
|||
});
|
||||
}
|
||||
|
||||
private void setHeaders(HttpRequest httpRequest, Header[] requestHeaders) {
|
||||
private void setHeaders(HttpRequest httpRequest, Collection<Header> requestHeaders) {
|
||||
// request headers override default headers, so we don't add default headers if they exist as request headers
|
||||
final Set<String> requestNames = new HashSet<>(requestHeaders.length);
|
||||
final Set<String> requestNames = new HashSet<>(requestHeaders.size());
|
||||
for (Header requestHeader : requestHeaders) {
|
||||
httpRequest.addHeader(requestHeader);
|
||||
requestNames.add(requestHeader.getName());
|
||||
|
@ -877,10 +877,24 @@ public class RestClient implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all headers from the provided varargs argument to a {@link Request}. This only exists
|
||||
* to support methods that exist for backwards compatibility.
|
||||
*/
|
||||
@Deprecated
|
||||
private static void addHeaders(Request request, Header... headers) {
|
||||
Objects.requireNonNull(headers, "headers cannot be null");
|
||||
for (Header header : headers) {
|
||||
Objects.requireNonNull(header, "header cannot be null");
|
||||
request.addHeader(header.getName(), header.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all parameters from a map to a {@link Request}. This only exists
|
||||
* to support methods that exist for backwards compatibility.
|
||||
*/
|
||||
@Deprecated
|
||||
private static void addParameters(Request request, Map<String, String> parameters) {
|
||||
Objects.requireNonNull(parameters, "parameters cannot be null");
|
||||
for (Map.Entry<String, String> entry : parameters.entrySet()) {
|
||||
|
|
|
@ -19,21 +19,21 @@
|
|||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.http.Header;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.message.BasicHeader;
|
||||
import org.apache.http.nio.entity.NStringEntity;
|
||||
import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory;
|
||||
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
@ -127,31 +127,33 @@ public class RequestTests extends RestClientTestCase {
|
|||
assertEquals(json, new String(os.toByteArray(), ContentType.APPLICATION_JSON.getCharset()));
|
||||
}
|
||||
|
||||
public void testSetHeaders() {
|
||||
public void testAddHeader() {
|
||||
final String method = randomFrom(new String[] {"GET", "PUT", "POST", "HEAD", "DELETE"});
|
||||
final String endpoint = randomAsciiLettersOfLengthBetween(1, 10);
|
||||
Request request = new Request(method, endpoint);
|
||||
|
||||
try {
|
||||
request.setHeaders((Header[]) null);
|
||||
request.addHeader(null, randomAsciiLettersOfLengthBetween(3, 10));
|
||||
fail("expected failure");
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("headers cannot be null", e.getMessage());
|
||||
assertEquals("header name cannot be null", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
request.setHeaders(new Header [] {null});
|
||||
request.addHeader(randomAsciiLettersOfLengthBetween(3, 10), null);
|
||||
fail("expected failure");
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals("header cannot be null", e.getMessage());
|
||||
assertEquals("header value cannot be null", e.getMessage());
|
||||
}
|
||||
|
||||
Header[] headers = new Header[between(0, 5)];
|
||||
for (int i = 0; i < headers.length; i++) {
|
||||
headers[i] = new BasicHeader(randomAsciiAlphanumOfLength(3), randomAsciiAlphanumOfLength(3));
|
||||
int numHeaders = between(0, 5);
|
||||
List<Header> headers = new ArrayList<>();
|
||||
for (int i = 0; i < numHeaders; i++) {
|
||||
Header header = new Request.ReqHeader(randomAsciiAlphanumOfLengthBetween(5, 10), randomAsciiAlphanumOfLength(3));
|
||||
headers.add(header);
|
||||
request.addHeader(header.getName(), header.getValue());
|
||||
}
|
||||
request.setHeaders(headers);
|
||||
assertArrayEquals(headers, request.getHeaders());
|
||||
assertEquals(headers, new ArrayList<>(request.getHeaders()));
|
||||
}
|
||||
|
||||
public void testEqualsAndHashCode() {
|
||||
|
@ -168,7 +170,7 @@ public class RequestTests extends RestClientTestCase {
|
|||
assertNotEquals(mutant, request);
|
||||
}
|
||||
|
||||
private Request randomRequest() {
|
||||
private static Request randomRequest() {
|
||||
Request request = new Request(
|
||||
randomFrom(new String[] {"GET", "PUT", "DELETE", "POST", "HEAD", "OPTIONS"}),
|
||||
randomAsciiAlphanumOfLength(5));
|
||||
|
@ -192,11 +194,9 @@ public class RequestTests extends RestClientTestCase {
|
|||
|
||||
if (randomBoolean()) {
|
||||
int headerCount = between(1, 5);
|
||||
Header[] headers = new Header[headerCount];
|
||||
for (int i = 0; i < headerCount; i++) {
|
||||
headers[i] = new BasicHeader(randomAsciiAlphanumOfLength(3), randomAsciiAlphanumOfLength(3));
|
||||
request.addHeader(randomAsciiAlphanumOfLength(3), randomAsciiAlphanumOfLength(3));
|
||||
}
|
||||
request.setHeaders(headers);
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
|
@ -206,13 +206,13 @@ public class RequestTests extends RestClientTestCase {
|
|||
return request;
|
||||
}
|
||||
|
||||
private Request copy(Request request) {
|
||||
private static Request copy(Request request) {
|
||||
Request copy = new Request(request.getMethod(), request.getEndpoint());
|
||||
copyMutables(request, copy);
|
||||
return copy;
|
||||
}
|
||||
|
||||
private Request mutate(Request request) {
|
||||
private static Request mutate(Request request) {
|
||||
if (randomBoolean()) {
|
||||
// Mutate request or method but keep everything else constant
|
||||
Request mutant = randomBoolean()
|
||||
|
@ -231,11 +231,7 @@ public class RequestTests extends RestClientTestCase {
|
|||
mutant.setJsonEntity("mutant"); // randomRequest can't produce this value
|
||||
return mutant;
|
||||
case 2:
|
||||
if (mutant.getHeaders().length > 0) {
|
||||
mutant.setHeaders(new Header[0]);
|
||||
} else {
|
||||
mutant.setHeaders(new BasicHeader("extra", "m"));
|
||||
}
|
||||
mutant.addHeader("extra", "m");
|
||||
return mutant;
|
||||
case 3:
|
||||
mutant.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(5));
|
||||
|
@ -245,12 +241,14 @@ public class RequestTests extends RestClientTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
private void copyMutables(Request from, Request to) {
|
||||
private static void copyMutables(Request from, Request to) {
|
||||
for (Map.Entry<String, String> param : from.getParameters().entrySet()) {
|
||||
to.addParameter(param.getKey(), param.getValue());
|
||||
}
|
||||
to.setEntity(from.getEntity());
|
||||
to.setHeaders(from.getHeaders());
|
||||
for (Header header : from.getHeaders()) {
|
||||
to.addHeader(header.getName(), header.getValue());
|
||||
}
|
||||
to.setHttpAsyncResponseConsumerFactory(from.getHttpAsyncResponseConsumerFactory());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,7 +29,6 @@ import org.apache.http.HttpHost;
|
|||
import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.apache.http.impl.client.TargetAuthenticationStrategy;
|
||||
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
|
||||
|
@ -379,7 +378,9 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
|
|||
String requestBody = "{ \"field\": \"value\" }";
|
||||
Request request = new Request(method, "/" + statusCode);
|
||||
request.setJsonEntity(requestBody);
|
||||
request.setHeaders(headers);
|
||||
for (Header header : headers) {
|
||||
request.addHeader(header.getName(), header.getValue());
|
||||
}
|
||||
Response esResponse;
|
||||
try {
|
||||
esResponse = restClient.performRequest(request);
|
||||
|
|
|
@ -312,7 +312,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testSetHeaders()}.
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeader()}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void tesPerformRequestOldStyleNullHeaders() throws IOException {
|
||||
|
@ -333,7 +333,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testSetParameters()}.
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddParameters()}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void testPerformRequestOldStyleWithNullParams() throws IOException {
|
||||
|
@ -362,7 +362,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
|
||||
final int statusCode = randomStatusCode(getRandom());
|
||||
Request request = new Request(method, "/" + statusCode);
|
||||
request.setHeaders(requestHeaders);
|
||||
for (Header requestHeader : requestHeaders) {
|
||||
request.addHeader(requestHeader.getName(), requestHeader.getValue());
|
||||
}
|
||||
Response esResponse;
|
||||
try {
|
||||
esResponse = restClient.performRequest(request);
|
||||
|
@ -436,9 +438,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
|
|||
final Set<String> uniqueNames = new HashSet<>();
|
||||
if (randomBoolean()) {
|
||||
Header[] headers = RestClientTestUtil.randomHeaders(getRandom(), "Header");
|
||||
request.setHeaders(headers);
|
||||
for (Header header : headers) {
|
||||
expectedRequest.addHeader(header);
|
||||
request.addHeader(header.getName(), header.getValue());
|
||||
expectedRequest.addHeader(new Request.ReqHeader(header.getName(), header.getValue()));
|
||||
uniqueNames.add(header.getName());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,11 +27,13 @@ import java.io.IOException;
|
|||
import java.net.URI;
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThat;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
@ -57,17 +59,20 @@ public class RestClientTests extends RestClientTestCase {
|
|||
restClient.performRequestAsync(new Request("unsupported", randomAsciiLettersOfLength(5)), new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of unsupported method");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(UnsupportedOperationException.class));
|
||||
assertEquals("http method not supported: unsupported", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(UnsupportedOperationException.class));
|
||||
assertEquals("http method not supported: unsupported", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -81,17 +86,20 @@ public class RestClientTests extends RestClientTestCase {
|
|||
restClient.performRequestAsync("unsupported", randomAsciiLettersOfLength(5), new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of unsupported method");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(UnsupportedOperationException.class));
|
||||
assertEquals("http method not supported: unsupported", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(UnsupportedOperationException.class));
|
||||
assertEquals("http method not supported: unsupported", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -105,22 +113,25 @@ public class RestClientTests extends RestClientTestCase {
|
|||
restClient.performRequestAsync(randomAsciiLettersOfLength(5), randomAsciiLettersOfLength(5), null, new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of null parameters");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(NullPointerException.class));
|
||||
assertEquals("parameters cannot be null", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(NullPointerException.class));
|
||||
assertEquals("parameters cannot be null", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testSetHeaders()}.
|
||||
* @deprecated will remove method in 7.0 but needs tests until then. Replaced by {@link RequestTests#testAddHeader()}.
|
||||
*/
|
||||
@Deprecated
|
||||
public void testPerformOldStyleAsyncWithNullHeaders() throws Exception {
|
||||
|
@ -129,18 +140,21 @@ public class RestClientTests extends RestClientTestCase {
|
|||
ResponseListener listener = new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of null headers");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(NullPointerException.class));
|
||||
assertEquals("header cannot be null", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(NullPointerException.class));
|
||||
assertEquals("header cannot be null", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
};
|
||||
restClient.performRequestAsync("GET", randomAsciiLettersOfLength(5), listener, (Header) null);
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -150,17 +164,20 @@ public class RestClientTests extends RestClientTestCase {
|
|||
restClient.performRequestAsync(new Request("GET", "::http:///"), new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of wrong endpoint");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(IllegalArgumentException.class));
|
||||
assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(IllegalArgumentException.class));
|
||||
assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -174,17 +191,20 @@ public class RestClientTests extends RestClientTestCase {
|
|||
restClient.performRequestAsync("GET", "::http:///", new ResponseListener() {
|
||||
@Override
|
||||
public void onSuccess(Response response) {
|
||||
fail("should have failed because of wrong endpoint");
|
||||
throw new UnsupportedOperationException("onSuccess cannot be called when using a mocked http client");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception exception) {
|
||||
assertThat(exception, instanceOf(IllegalArgumentException.class));
|
||||
assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
|
||||
latch.countDown();
|
||||
try {
|
||||
assertThat(exception, instanceOf(IllegalArgumentException.class));
|
||||
assertEquals("Expected scheme name at index 0: ::http:///", exception.getMessage());
|
||||
} finally {
|
||||
latch.countDown();
|
||||
}
|
||||
}
|
||||
});
|
||||
latch.await();
|
||||
assertTrue("time out waiting for request to return", latch.await(1000, TimeUnit.MILLISECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -27,9 +27,7 @@ import org.apache.http.auth.AuthScope;
|
|||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.client.CredentialsProvider;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.entity.BasicHttpEntity;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
|
||||
import org.apache.http.impl.nio.reactor.IOReactorConfig;
|
||||
|
@ -52,8 +50,6 @@ import java.nio.file.Files;
|
|||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.KeyStore;
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
/**
|
||||
|
@ -176,9 +172,8 @@ public class RestClientDocumentation {
|
|||
request.setJsonEntity("{\"json\":\"text\"}");
|
||||
//end::rest-client-body-shorter
|
||||
//tag::rest-client-headers
|
||||
request.setHeaders(
|
||||
new BasicHeader("Accept", "text/plain"),
|
||||
new BasicHeader("Cache-Control", "no-cache"));
|
||||
request.addHeader("Accept", "text/plain");
|
||||
request.addHeader("Cache-Control", "no-cache");
|
||||
//end::rest-client-headers
|
||||
//tag::rest-client-response-consumer
|
||||
request.setHttpAsyncResponseConsumerFactory(
|
||||
|
|
|
@ -100,7 +100,7 @@ final class RestClientTestUtil {
|
|||
if (random.nextBoolean()) {
|
||||
headerName = headerName + i;
|
||||
}
|
||||
headers[i] = new BasicHeader(headerName, RandomStrings.randomAsciiOfLengthBetween(random, 3, 10));
|
||||
headers[i] = new BasicHeader(headerName, RandomStrings.randomAsciiLettersOfLengthBetween(random, 3, 10));
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
call "%~dp0elasticsearch-env.bat" || exit /b 1
|
||||
|
||||
if defined ES_ADDITIONAL_SOURCES (
|
||||
for %%a in ("%ES_ADDITIONAL_SOURCES:;=","%") do (
|
||||
call %~dp0%%a
|
||||
)
|
||||
)
|
||||
|
||||
for /f "tokens=1*" %%a in ("%*") do (
|
||||
set main_class=%%a
|
||||
set arguments=%%b
|
||||
)
|
||||
|
||||
%JAVA% ^
|
||||
%ES_JAVA_OPTS% ^
|
||||
-Des.path.home="%ES_HOME%" ^
|
||||
-Des.path.conf="%ES_PATH_CONF%" ^
|
||||
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
|
||||
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
|
||||
-cp "%ES_CLASSPATH%" ^
|
||||
%main_class% ^
|
||||
%arguments%
|
|
@ -3,17 +3,10 @@
|
|||
setlocal enabledelayedexpansion
|
||||
setlocal enableextensions
|
||||
|
||||
call "%~dp0elasticsearch-env.bat" || exit /b 1
|
||||
|
||||
%JAVA% ^
|
||||
%ES_JAVA_OPTS% ^
|
||||
-Des.path.home="%ES_HOME%" ^
|
||||
-Des.path.conf="%ES_PATH_CONF%" ^
|
||||
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
|
||||
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
|
||||
-cp "%ES_CLASSPATH%" ^
|
||||
org.elasticsearch.common.settings.KeyStoreCli ^
|
||||
%*
|
||||
call "%~dp0elasticsearch-cli.bat" ^
|
||||
org.elasticsearch.common.settings.KeyStoreCli ^
|
||||
%* ^
|
||||
|| exit /b 1
|
||||
|
||||
endlocal
|
||||
endlocal
|
||||
|
|
|
@ -3,17 +3,10 @@
|
|||
setlocal enabledelayedexpansion
|
||||
setlocal enableextensions
|
||||
|
||||
call "%~dp0elasticsearch-env.bat" || exit /b 1
|
||||
|
||||
%JAVA% ^
|
||||
%ES_JAVA_OPTS% ^
|
||||
-Des.path.home="%ES_HOME%" ^
|
||||
-Des.path.conf="%ES_PATH_CONF%" ^
|
||||
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
|
||||
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
|
||||
-cp "%ES_CLASSPATH%" ^
|
||||
call "%~dp0elasticsearch-cli.bat" ^
|
||||
org.elasticsearch.plugins.PluginCli ^
|
||||
%*
|
||||
%* ^
|
||||
|| exit /b 1
|
||||
|
||||
endlocal
|
||||
endlocal
|
||||
|
|
|
@ -3,17 +3,10 @@
|
|||
setlocal enabledelayedexpansion
|
||||
setlocal enableextensions
|
||||
|
||||
call "%~dp0elasticsearch-env.bat" || exit /b 1
|
||||
|
||||
%JAVA% ^
|
||||
%ES_JAVA_OPTS% ^
|
||||
-Des.path.home="%ES_HOME%" ^
|
||||
-Des.path.conf="%ES_PATH_CONF%" ^
|
||||
-Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
|
||||
-Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
|
||||
-cp "%ES_CLASSPATH%" ^
|
||||
call "%~dp0elasticsearch-cli.bat" ^
|
||||
org.elasticsearch.index.translog.TranslogToolCli ^
|
||||
%*
|
||||
%* ^
|
||||
|| exit /b 1
|
||||
|
||||
endlocal
|
||||
endlocal
|
||||
|
|
|
@@ -271,7 +271,7 @@ a `ContentType` of `application/json`.
 include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-body-shorter]
 --------------------------------------------------

-And you can set a list of headers to send with the request:
+And you can add one or more headers to send with the request:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
|
|
|
@ -103,6 +103,11 @@ The `simple_pattern` tokenizer uses a regular expression to capture matching
|
|||
text as terms. It uses a restricted subset of regular expression features
|
||||
and is generally faster than the `pattern` tokenizer.
|
||||
|
||||
<<analysis-chargroup-tokenizer,Char Group Tokenizer>>::
|
||||
|
||||
The `char_group` tokenizer is configurable through sets of characters to split
|
||||
on, which is usually less expensive than running regular expressions.
|
||||
|
||||
<<analysis-simplepatternsplit-tokenizer,Simple Pattern Split Tokenizer>>::
|
||||
|
||||
The `simple_pattern_split` tokenizer uses the same restricted regular expression
|
||||
|
@ -143,6 +148,8 @@ include::tokenizers/keyword-tokenizer.asciidoc[]
|
|||
|
||||
include::tokenizers/pattern-tokenizer.asciidoc[]
|
||||
|
||||
include::tokenizers/chargroup-tokenizer.asciidoc[]
|
||||
|
||||
include::tokenizers/simplepattern-tokenizer.asciidoc[]
|
||||
|
||||
include::tokenizers/simplepatternsplit-tokenizer.asciidoc[]
|
||||
|
|
|
@ -0,0 +1,80 @@
|
|||
[[analysis-chargroup-tokenizer]]
|
||||
=== Char Group Tokenizer
|
||||
|
||||
The `char_group` tokenizer breaks text into terms whenever it encounters a
character which is in a defined set. It is mostly useful for cases where simple,
custom tokenization is desired and the overhead of the
<<analysis-pattern-tokenizer, `pattern` tokenizer>> is not acceptable.
|
||||
|
||||
[float]
|
||||
=== Configuration
|
||||
|
||||
The `char_group` tokenizer accepts one parameter:
|
||||
|
||||
[horizontal]
|
||||
`tokenize_on_chars`::
    A list of characters to tokenize the string on. Whenever a character
    from this list is encountered, a new token is started. This accepts either single
    characters, e.g. `-`, or character groups: `whitespace`, `letter`, `digit`,
    `punctuation`, `symbol`.
|
||||
|
||||
|
||||
[float]
|
||||
=== Example output
|
||||
|
||||
[source,js]
|
||||
---------------------------
|
||||
POST _analyze
|
||||
{
|
||||
"tokenizer": {
|
||||
"type": "char_group",
|
||||
"tokenize_on_chars": [
|
||||
"whitespace",
|
||||
"-",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
"text": "The QUICK brown-fox"
|
||||
}
|
||||
---------------------------
|
||||
// CONSOLE
|
||||
|
||||
returns
|
||||
|
||||
[source,js]
|
||||
---------------------------
|
||||
{
|
||||
"tokens": [
|
||||
{
|
||||
"token": "The",
|
||||
"start_offset": 0,
|
||||
"end_offset": 3,
|
||||
"type": "word",
|
||||
"position": 0
|
||||
},
|
||||
{
|
||||
"token": "QUICK",
|
||||
"start_offset": 4,
|
||||
"end_offset": 9,
|
||||
"type": "word",
|
||||
"position": 1
|
||||
},
|
||||
{
|
||||
"token": "brown",
|
||||
"start_offset": 10,
|
||||
"end_offset": 15,
|
||||
"type": "word",
|
||||
"position": 2
|
||||
},
|
||||
{
|
||||
"token": "fox",
|
||||
"start_offset": 16,
|
||||
"end_offset": 19,
|
||||
"type": "word",
|
||||
"position": 3
|
||||
}
|
||||
]
|
||||
}
|
||||
---------------------------
|
||||
// TESTRESPONSE
|
||||
|
|
@ -40,6 +40,8 @@ string:: <<text,`text`>> and <<keyword,`keyword`>>
|
|||
|
||||
<<parent-join>>:: Defines parent/child relation for documents within the same index
|
||||
|
||||
<<feature>>:: Record numeric features to boost hits at query time.
|
||||
|
||||
[float]
|
||||
=== Multi-fields
|
||||
|
||||
|
@ -86,6 +88,6 @@ include::types/percolator.asciidoc[]
|
|||
|
||||
include::types/parent-join.asciidoc[]
|
||||
|
||||
|
||||
include::types/feature.asciidoc[]
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
[[feature]]
|
||||
=== Feature datatype
|
||||
|
||||
A `feature` field can index numbers so that they can later be used to boost
|
||||
documents in queries with a <<query-dsl-feature-query,`feature`>> query.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT my_index
|
||||
{
|
||||
"mappings": {
|
||||
"_doc": {
|
||||
"properties": {
|
||||
"pagerank": {
|
||||
"type": "feature" <1>
|
||||
},
|
||||
"url_length": {
|
||||
"type": "feature",
|
||||
"positive_score_impact": false <2>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT my_index/_doc/1
|
||||
{
|
||||
"pagerank": 8,
|
||||
"url_length": 22
|
||||
}
|
||||
|
||||
GET my_index/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
<1> Feature fields must use the `feature` field type
|
||||
<2> Features that correlate negatively with the score need to declare it
|
||||
|
||||
NOTE: `feature` fields only support single-valued fields and strictly positive
|
||||
values. Multi-valued fields and negative values will be rejected.
|
||||
|
||||
NOTE: `feature` fields do not support querying, sorting or aggregating. They may
|
||||
only be used within <<query-dsl-feature-query,`feature`>> queries.
|
||||
|
||||
NOTE: `feature` fields only preserve 9 significant bits for the precision, which
|
||||
translates to a relative error of about 0.4%.
|
||||
|
||||
Features that correlate negatively with the score should set
|
||||
`positive_score_impact` to `false` (defaults to `true`). This will be used by
|
||||
the <<query-dsl-feature-query,`feature`>> query to modify the scoring formula
|
||||
in such a way that the score decreases with the value of the feature instead of
|
||||
increasing. For instance in web search, the url length is a commonly used
|
||||
feature which correlates negatively with scores.
|
|
@ -10,7 +10,7 @@ You need to use settings which are starting with `azure.client.` prefix instead.
|
|||
* Global timeout setting `cloud.azure.storage.timeout` has been removed.
|
||||
You must set it per Azure client instead, for example `azure.client.default.timeout: 10s`.
|
||||
|
||||
See {plugins}/repository-azure-usage.html#repository-azure-repository-settings[Azure Repository settings].
|
||||
See {plugins}/repository-azure-repository-settings.html#repository-azure-repository-settings[Azure Repository settings].
|
||||
|
||||
==== Google Cloud Storage Repository plugin
|
||||
|
||||
|
|
|
@@ -76,7 +76,7 @@ memory on a node. The memory usage is based on the content length of the request
 [float]
 ==== Accounting requests circuit breaker

-The in flight requests circuit breaker allows Elasticsearch to limit the memory
+The accounting circuit breaker allows Elasticsearch to limit the memory
 usage of things held in memory that are not released when a request is
 completed. This includes things like the Lucene segment memory.
|
||||
|
|
|
@ -0,0 +1,181 @@
|
|||
[[query-dsl-feature-query]]
|
||||
=== Feature Query
|
||||
|
||||
The `feature` query is a specialized query that only works on
|
||||
<<feature,`feature`>> fields. Its goal is to boost the score of documents based
|
||||
on the values of numeric features. It is typically put in a `should` clause of
|
||||
a <<query-dsl-bool-query,`bool`>> query so that its score is added to the score
|
||||
of the query.
|
||||
|
||||
Compared to using <<query-dsl-function-score-query,`function_score`>> or other
|
||||
ways to modify the score, this query has the benefit of being able to
|
||||
efficiently skip non-competitive hits when
|
||||
<<search-uri-request,`track_total_hits`>> is set to `false`. Speedups may be
|
||||
spectacular.
|
||||
|
||||
Here is an example:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT test
|
||||
{
|
||||
"mappings": {
|
||||
"_doc": {
|
||||
"properties": {
|
||||
"pagerank": {
|
||||
"type": "feature"
|
||||
},
|
||||
"url_length": {
|
||||
"type": "feature",
|
||||
"positive_score_impact": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
PUT test/_doc/1
|
||||
{
|
||||
"pagerank": 10,
|
||||
"url_length": 50
|
||||
}
|
||||
|
||||
PUT test/_doc/2
|
||||
{
|
||||
"pagerank": 100,
|
||||
"url_length": 20
|
||||
}
|
||||
|
||||
POST test/_refresh
|
||||
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "url_length"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
[float]
|
||||
=== Supported functions
|
||||
|
||||
The `feature` query supports 3 functions in order to boost scores using the
|
||||
values of features. If you do not know where to start, we recommend that you
|
||||
start with the `saturation` function, which is the default when no function is
|
||||
provided.
|
||||
|
||||
[float]
|
||||
==== Saturation
|
||||
|
||||
This function gives a score that is equal to `S / (S + pivot)` where `S` is the
|
||||
value of the feature and `pivot` is a configurable pivot value so that the
|
||||
result will be less than +0.5+ if `S` is less than pivot and greater than +0.5+
|
||||
otherwise. Scores are always is +(0, 1)+.
|
||||
|
||||
If the feature has a negative score impact then the function will be computed as
|
||||
`pivot / (S + pivot)`, which decreases when `S` increases.
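
As a worked example of these formulas: with `pivot` set to `8`, a `pagerank` value of `8` yields a score of `8 / (8 + 8) = 0.5`, while a value of `24` yields `24 / (24 + 8) = 0.75`. For a negative-impact feature such as `url_length` above, a value of `8` likewise yields `8 / (8 + 8) = 0.5`, while `24` yields `8 / (24 + 8) = 0.25`.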
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank",
|
||||
"saturation": {
|
||||
"pivot": 8
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
If +pivot+ is not supplied then Elasticsearch will compute a default value that
|
||||
will be approximately equal to the geometric mean of all feature values that
|
||||
exist in the index. We recommend this if you haven't had the opportunity to
|
||||
train a good pivot value.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank",
|
||||
"saturation": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
[float]
|
||||
==== Logarithm
|
||||
|
||||
This function gives a score that is equal to `log(scaling_factor + S)` where
|
||||
`S` is the value of the feature and `scaling_factor` is a configurable scaling
|
||||
factor. Scores are unbounded.
|
||||
|
||||
This function only supports features that have a positive score impact.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank",
|
||||
"log": {
|
||||
"scaling_factor": 4
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
||||
|
||||
[float]
|
||||
==== Sigmoid
|
||||
|
||||
This function is an extension of `saturation` which adds a configurable
|
||||
exponent. Scores are computed as `S^exp^ / (S^exp^ + pivot^exp^)`. Like for the
|
||||
`saturation` function, `pivot` is the value of `S` that gives a score of +0.5+
|
||||
and scores are in +(0, 1)+.
|
||||
|
||||
`exponent` must be positive, but is typically in +[0.5, 1]+. A good value should
|
||||
be computed via training. If you don't have the opportunity to do so, we recommend
|
||||
that you stick to the `saturation` function instead.
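
As a worked example: with `pivot: 7` and `exponent: 0.6`, as in the request below, a `pagerank` of `7` scores `7^0.6^ / (7^0.6^ + 7^0.6^) = 0.5`, and larger values approach `1` more slowly than they would with `exponent: 1`.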
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET test/_search
|
||||
{
|
||||
"query": {
|
||||
"feature": {
|
||||
"field": "pagerank",
|
||||
"sigmoid": {
|
||||
"pivot": 7,
|
||||
"exponent": 0.6
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[continued]
|
|
@ -19,6 +19,11 @@ This query allows a script to act as a filter. Also see the
|
|||
This query finds queries that are stored as documents that match with
|
||||
the specified document.
|
||||
|
||||
<<query-dsl-feature-query,`feature` query>>::
|
||||
|
||||
A query that computes scores based on the values of numeric features and is
|
||||
able to efficiently skip non-competitive hits.
|
||||
|
||||
<<query-dsl-wrapper-query,`wrapper` query>>::
|
||||
|
||||
A query that accepts other queries as json or yaml string.
|
||||
|
@ -29,4 +34,6 @@ include::script-query.asciidoc[]
|
|||
|
||||
include::percolate-query.asciidoc[]
|
||||
|
||||
include::feature-query.asciidoc[]
|
||||
|
||||
include::wrapper-query.asciidoc[]
|
||||
|
|
|
@ -11,13 +11,38 @@ GET /_search
|
|||
"query" : {
|
||||
"match_all": {}
|
||||
},
|
||||
"docvalue_fields" : ["test1", "test2"]
|
||||
"docvalue_fields" : [
|
||||
{
|
||||
"field": "my_ip_field", <1>
|
||||
"format": "use_field_mapping" <2>
|
||||
},
|
||||
{
|
||||
"field": "my_date_field",
|
||||
"format": "epoch_millis" <3>
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
<1> the name of the field
|
||||
<2> the special `use_field_mapping` format tells Elasticsearch to use the format from the mapping
|
||||
<3> date fields may use a custom format
|
||||
|
||||
Doc value fields can work on fields that are not stored.
|
||||
|
||||
Note that if the fields parameter specifies fields without doc values, Elasticsearch will try to load the value from the fielddata cache,
causing the terms for that field to be loaded into memory (cached), which results in more memory consumption.
|
||||
|
||||
[float]
|
||||
==== Custom formats
|
||||
|
||||
While most fields do not support custom formats, some of them do:
|
||||
- <<date,Date>> fields can take any <<mapping-date-format,date format>>.
|
||||
- <<number,Numeric>> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern].
|
||||
|
||||
All fields support the special `use_field_mapping` format, which tells
|
||||
Elasticsearch to use the mappings to figure out a default format.
|
||||
|
||||
NOTE: The default is currently to return the same output as
|
||||
<<search-request-script-fields,script fields>>. However it will change in 7.0
|
||||
to behave as if the `use_field_mapping` format was provided.
|
||||
|
|
|
@ -242,7 +242,12 @@ POST test/_search
|
|||
},
|
||||
"inner_hits": {
|
||||
"_source" : false,
|
||||
"docvalue_fields" : ["comments.text.keyword"]
|
||||
"docvalue_fields" : [
|
||||
{
|
||||
"field": "comments.text.keyword",
|
||||
"format": "use_field_mapping"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,13 +15,13 @@ GET /_search
|
|||
"test1" : {
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source": "doc['my_field_name'].value * 2"
|
||||
"source": "doc['price'].value * 2"
|
||||
}
|
||||
},
|
||||
"test2" : {
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source": "doc['my_field_name'].value * params.factor",
|
||||
"source": "doc['price'].value * params.factor",
|
||||
"params" : {
|
||||
"factor" : 2.0
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ GET /_search
|
|||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
// TEST[setup:sales]
|
||||
|
||||
Script fields can work on fields that are not stored (`my_field_name` in
|
||||
the above case), and allow to return custom values to be returned (the
|
||||
|
|
|
@ -7,7 +7,7 @@ scattered to all the relevant shards and then all the results are
|
|||
gathered back. When doing scatter/gather type execution, there are
|
||||
several ways to do that, specifically with search engines.
|
||||
|
||||
One of the questions when executing a distributed search is how much
|
||||
One of the questions when executing a distributed search is how many
|
||||
results to retrieve from each shard. For example, if we have 10 shards,
|
||||
the 1st shard might hold the most relevant results from 0 till 10, with
|
||||
other shards results ranking below it. For this reason, when executing a
|
||||
|
|
|
@ -0,0 +1,135 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.Tokenizer;
|
||||
import org.apache.lucene.analysis.util.CharTokenizer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractTokenizerFactory;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
public class CharGroupTokenizerFactory extends AbstractTokenizerFactory{
|
||||
|
||||
private final Set<Integer> tokenizeOnChars = new HashSet<>();
|
||||
private boolean tokenizeOnSpace = false;
|
||||
private boolean tokenizeOnLetter = false;
|
||||
private boolean tokenizeOnDigit = false;
|
||||
private boolean tokenizeOnPunctuation = false;
|
||||
private boolean tokenizeOnSymbol = false;
|
||||
|
||||
public CharGroupTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
|
||||
for (final String c : settings.getAsList("tokenize_on_chars")) {
|
||||
if (c == null || c.length() == 0) {
|
||||
throw new RuntimeException("[tokenize_on_chars] cannot contain empty characters");
|
||||
}
|
||||
|
||||
if (c.length() == 1) {
|
||||
tokenizeOnChars.add((int) c.charAt(0));
|
||||
}
|
||||
else if (c.charAt(0) == '\\') {
|
||||
tokenizeOnChars.add((int) parseEscapedChar(c));
|
||||
} else {
|
||||
switch (c) {
|
||||
case "letter":
|
||||
tokenizeOnLetter = true;
|
||||
break;
|
||||
case "digit":
|
||||
tokenizeOnDigit = true;
|
||||
break;
|
||||
case "whitespace":
|
||||
tokenizeOnSpace = true;
|
||||
break;
|
||||
case "punctuation":
|
||||
tokenizeOnPunctuation = true;
|
||||
break;
|
||||
case "symbol":
|
||||
tokenizeOnSymbol = true;
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Invalid escaped char in [" + c + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private char parseEscapedChar(final String s) {
|
||||
int len = s.length();
|
||||
char c = s.charAt(0);
|
||||
if (c == '\\') {
|
||||
if (1 >= len)
|
||||
throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
c = s.charAt(1);
|
||||
switch (c) {
|
||||
case '\\':
|
||||
return '\\';
|
||||
case 'n':
|
||||
return '\n';
|
||||
case 't':
|
||||
return '\t';
|
||||
case 'r':
|
||||
return '\r';
|
||||
case 'b':
|
||||
return '\b';
|
||||
case 'f':
|
||||
return '\f';
|
||||
case 'u':
|
||||
if (len > 6) {
|
||||
throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
}
|
||||
return (char) Integer.parseInt(s.substring(2), 16);
|
||||
default:
|
||||
throw new RuntimeException("Invalid escaped char " + c + " in [" + s + "]");
|
||||
}
|
||||
} else {
|
||||
throw new RuntimeException("Invalid escaped char [" + s + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Tokenizer create() {
|
||||
return new CharTokenizer() {
|
||||
@Override
|
||||
protected boolean isTokenChar(int c) {
|
||||
if (tokenizeOnSpace && Character.isWhitespace(c)) {
|
||||
return false;
|
||||
}
|
||||
if (tokenizeOnLetter && Character.isLetter(c)) {
|
||||
return false;
|
||||
}
|
||||
if (tokenizeOnDigit && Character.isDigit(c)) {
|
||||
return false;
|
||||
}
|
||||
if (tokenizeOnPunctuation && CharMatcher.Basic.PUNCTUATION.isTokenChar(c)) {
|
||||
return false;
|
||||
}
|
||||
if (tokenizeOnSymbol && CharMatcher.Basic.SYMBOL.isTokenChar(c)) {
|
||||
return false;
|
||||
}
|
||||
return !tokenizeOnChars.contains(c);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -184,6 +184,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
|
|||
tokenizers.put("ngram", NGramTokenizerFactory::new);
|
||||
tokenizers.put("edgeNGram", EdgeNGramTokenizerFactory::new);
|
||||
tokenizers.put("edge_ngram", EdgeNGramTokenizerFactory::new);
|
||||
tokenizers.put("char_group", CharGroupTokenizerFactory::new);
|
||||
tokenizers.put("classic", ClassicTokenizerFactory::new);
|
||||
tokenizers.put("letter", LetterTokenizerFactory::new);
|
||||
tokenizers.put("lowercase", LowerCaseTokenizerFactory::new);
|
||||
|
|
|
@@ -0,0 +1,74 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;

import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;


public class CharGroupTokenizerFactoryTests extends ESTokenStreamTestCase {
    public void testParseTokenChars() {
        final Index index = new Index("test", "_na_");
        final Settings indexSettings = newAnalysisSettingsBuilder().build();
        IndexSettings indexProperties = IndexSettingsModule.newIndexSettings(index, indexSettings);
        final String name = "cg";
        for (String[] conf : Arrays.asList(
                new String[] { "\\v" },
                new String[] { "\\u00245" },
                new String[] { "commas" },
                new String[] { "a", "b", "c", "\\$" })) {
            final Settings settings = newAnalysisSettingsBuilder().putList("tokenize_on_chars", conf).build();
            expectThrows(RuntimeException.class, () -> new CharGroupTokenizerFactory(indexProperties, null, name, settings).create());
        }

        for (String[] conf : Arrays.asList(
                new String[0],
                new String[] { "\\n" },
                new String[] { "\\u0024" },
                new String[] { "whitespace" },
                new String[] { "a", "b", "c" },
                new String[] { "a", "b", "c", "\\r" },
                new String[] { "\\r" },
                new String[] { "f", "o", "o", "symbol" })) {
            final Settings settings = newAnalysisSettingsBuilder().putList("tokenize_on_chars", Arrays.asList(conf)).build();
            new CharGroupTokenizerFactory(indexProperties, null, name, settings).create();
            // no exception
        }
    }

    public void testTokenization() throws IOException {
        final Index index = new Index("test", "_na_");
        final String name = "cg";
        final Settings indexSettings = newAnalysisSettingsBuilder().build();
        final Settings settings = newAnalysisSettingsBuilder().putList("tokenize_on_chars", "whitespace", ":", "\\u0024").build();
        Tokenizer tokenizer = new CharGroupTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings),
                null, name, settings).create();
        tokenizer.setReader(new StringReader("foo bar $34 test:test2"));
        assertTokenStreamContents(tokenizer, new String[] {"foo", "bar", "34", "test", "test2"});
    }
}
@@ -1 +0,0 @@
a3dba337d06e1f5930cb7ae638c1655b99ce0cb7
@@ -0,0 +1 @@
1e28b448387ec05d655f8c81ee54e13ff2975a4d
@ -0,0 +1,248 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.document.FeatureField;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A {@link FieldMapper} that exposes Lucene's {@link FeatureField}.
|
||||
*/
|
||||
public class FeatureFieldMapper extends FieldMapper {
|
||||
|
||||
public static final String CONTENT_TYPE = "feature";
|
||||
|
||||
public static class Defaults {
|
||||
public static final MappedFieldType FIELD_TYPE = new FeatureFieldType();
|
||||
|
||||
static {
|
||||
FIELD_TYPE.setTokenized(false);
|
||||
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
|
||||
FIELD_TYPE.setHasDocValues(false);
|
||||
FIELD_TYPE.setOmitNorms(true);
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder extends FieldMapper.Builder<Builder, FeatureFieldMapper> {
|
||||
|
||||
public Builder(String name) {
|
||||
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
|
||||
builder = this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FeatureFieldType fieldType() {
|
||||
return (FeatureFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
public Builder positiveScoreImpact(boolean v) {
|
||||
fieldType().setPositiveScoreImpact(v);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FeatureFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
return new FeatureFieldMapper(
|
||||
name, fieldType, defaultFieldType,
|
||||
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
@Override
|
||||
public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
FeatureFieldMapper.Builder builder = new FeatureFieldMapper.Builder(name);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = entry.getKey();
|
||||
Object propNode = entry.getValue();
|
||||
if (propName.equals("positive_score_impact")) {
|
||||
builder.positiveScoreImpact(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
||||
public static final class FeatureFieldType extends MappedFieldType {
|
||||
|
||||
private boolean positiveScoreImpact = true;
|
||||
|
||||
public FeatureFieldType() {
|
||||
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
}
|
||||
|
||||
protected FeatureFieldType(FeatureFieldType ref) {
|
||||
super(ref);
|
||||
this.positiveScoreImpact = ref.positiveScoreImpact;
|
||||
}
|
||||
|
||||
public FeatureFieldType clone() {
|
||||
return new FeatureFieldType(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (super.equals(o) == false) {
|
||||
return false;
|
||||
}
|
||||
FeatureFieldType other = (FeatureFieldType) o;
|
||||
return Objects.equals(positiveScoreImpact, other.positiveScoreImpact);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int h = super.hashCode();
|
||||
h = 31 * h + Objects.hashCode(positiveScoreImpact);
|
||||
return h;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
|
||||
super.checkCompatibility(other, conflicts);
|
||||
if (positiveScoreImpact != ((FeatureFieldType) other).positiveScoreImpact()) {
|
||||
conflicts.add("mapper [" + name() + "] has different [positive_score_impact] values");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
public boolean positiveScoreImpact() {
|
||||
return positiveScoreImpact;
|
||||
}
|
||||
|
||||
public void setPositiveScoreImpact(boolean positiveScoreImpact) {
|
||||
checkIfFrozen();
|
||||
this.positiveScoreImpact = positiveScoreImpact;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query existsQuery(QueryShardContext context) {
|
||||
return new TermQuery(new Term("_feature", name()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query nullValueQuery() {
|
||||
if (nullValue() == null) {
|
||||
return null;
|
||||
}
|
||||
return termQuery(nullValue(), null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
|
||||
failIfNoDocValues();
|
||||
return new DocValuesIndexFieldData.Builder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new UnsupportedOperationException("Queries on [feature] fields are not supported");
|
||||
}
|
||||
}
|
||||
|
||||
private FeatureFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
|
||||
assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FeatureFieldMapper clone() {
|
||||
return (FeatureFieldMapper) super.clone();
|
||||
}
|
||||
|
||||
@Override
|
||||
public FeatureFieldType fieldType() {
|
||||
return (FeatureFieldType) super.fieldType();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
|
||||
float value;
|
||||
if (context.externalValueSet()) {
|
||||
Object v = context.externalValue();
|
||||
if (v instanceof Number) {
|
||||
value = ((Number) v).floatValue();
|
||||
} else {
|
||||
value = Float.parseFloat(v.toString());
|
||||
}
|
||||
} else if (context.parser().currentToken() == Token.VALUE_NULL) {
|
||||
// skip
|
||||
return;
|
||||
} else {
|
||||
value = context.parser().floatValue();
|
||||
}
|
||||
|
||||
if (context.doc().getByKey(name()) != null) {
|
||||
throw new IllegalArgumentException("[feature] fields do not support indexing multiple values for the same field [" + name() +
|
||||
"] in the same document");
|
||||
}
|
||||
|
||||
if (fieldType().positiveScoreImpact() == false) {
|
||||
value = 1 / value;
|
||||
}
|
||||
|
||||
context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
|
||||
super.doXContentBody(builder, includeDefaults, params);
|
||||
|
||||
if (includeDefaults || fieldType().nullValue() != null) {
|
||||
builder.field("null_value", fieldType().nullValue());
|
||||
}
|
||||
|
||||
if (includeDefaults || fieldType().positiveScoreImpact() == false) {
|
||||
builder.field("positive_score_impact", fieldType().positiveScoreImpact());
|
||||
}
|
||||
}
|
||||
}
|
|
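The FeatureFieldMapper added above writes each per-document value into the shared "_feature" Lucene field as a FeatureField, and indexes 1/value when positive_score_impact is false. A minimal sketch of that indexing path in plain Lucene; the "pagerank" feature name and constants are illustrative only.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FeatureField;

// Illustrative sketch: how a single "pagerank" feature value ends up in the index.
public class FeatureIndexingSketch {
    public static void main(String[] args) {
        float pagerank = 10f;
        boolean positiveScoreImpact = true;
        // fields mapped with positive_score_impact=false are indexed as 1/value,
        // matching parseCreateField above, so larger raw values score lower
        float indexed = positiveScoreImpact ? pagerank : 1 / pagerank;
        Document doc = new Document();
        doc.add(new FeatureField("_feature", "pagerank", indexed));
    }
}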
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This meta field only exists because feature fields index everything into a
|
||||
* common _feature field and Elasticsearch has a custom codec that complains
|
||||
* when fields exist in the index and not in mappings.
|
||||
*/
|
||||
public class FeatureMetaFieldMapper extends MetadataFieldMapper {
|
||||
|
||||
public static final String NAME = "_feature";
|
||||
|
||||
public static final String CONTENT_TYPE = "_feature";
|
||||
|
||||
public static class Defaults {
|
||||
public static final MappedFieldType FIELD_TYPE = new FeatureMetaFieldType();
|
||||
|
||||
static {
|
||||
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
|
||||
FIELD_TYPE.setTokenized(true);
|
||||
FIELD_TYPE.setStored(false);
|
||||
FIELD_TYPE.setOmitNorms(true);
|
||||
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
FIELD_TYPE.setName(NAME);
|
||||
FIELD_TYPE.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
public static class Builder extends MetadataFieldMapper.Builder<Builder, FeatureMetaFieldMapper> {
|
||||
|
||||
public Builder(MappedFieldType existing) {
|
||||
super(NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FeatureMetaFieldMapper build(BuilderContext context) {
|
||||
setupFieldType(context);
|
||||
return new FeatureMetaFieldMapper(fieldType, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
public static class TypeParser implements MetadataFieldMapper.TypeParser {
|
||||
@Override
|
||||
public MetadataFieldMapper.Builder<?,?> parse(String name,
|
||||
Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
return new Builder(parserContext.mapperService().fullName(NAME));
|
||||
}
|
||||
|
||||
@Override
|
||||
public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) {
|
||||
final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
|
||||
if (fieldType != null) {
|
||||
return new FeatureMetaFieldMapper(indexSettings, fieldType);
|
||||
} else {
|
||||
return parse(NAME, Collections.emptyMap(), context)
|
||||
.build(new BuilderContext(indexSettings, new ContentPath(1)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static final class FeatureMetaFieldType extends MappedFieldType {
|
||||
|
||||
public FeatureMetaFieldType() {
|
||||
}
|
||||
|
||||
protected FeatureMetaFieldType(FeatureMetaFieldType ref) {
|
||||
super(ref);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FeatureMetaFieldType clone() {
|
||||
return new FeatureMetaFieldType(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String typeName() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query existsQuery(QueryShardContext context) {
|
||||
throw new UnsupportedOperationException("Cannot run exists query on [_feature]");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, QueryShardContext context) {
|
||||
throw new UnsupportedOperationException("The [_feature] field may not be queried directly");
|
||||
}
|
||||
}
|
||||
|
||||
private FeatureMetaFieldMapper(Settings indexSettings, MappedFieldType existing) {
|
||||
this(existing.clone(), indexSettings);
|
||||
}
|
||||
|
||||
private FeatureMetaFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
|
||||
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void preParse(ParseContext context) throws IOException {}
|
||||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
|
||||
throw new AssertionError("Should never be called");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postParse(ParseContext context) throws IOException {}
|
||||
|
||||
@Override
|
||||
protected String contentType() {
|
||||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder;
|
||||
}
|
||||
}
|
|
@@ -19,21 +19,37 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.index.mapper.MetadataFieldMapper.TypeParser;
import org.elasticsearch.index.query.FeatureQueryBuilder;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

-public class MapperExtrasPlugin extends Plugin implements MapperPlugin {
+public class MapperExtrasPlugin extends Plugin implements MapperPlugin, SearchPlugin {

    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
        mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, new ScaledFloatFieldMapper.TypeParser());
        mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser());
        mappers.put(FeatureFieldMapper.CONTENT_TYPE, new FeatureFieldMapper.TypeParser());
        return Collections.unmodifiableMap(mappers);
    }

    @Override
    public Map<String, TypeParser> getMetadataMappers() {
        return Collections.singletonMap(FeatureMetaFieldMapper.CONTENT_TYPE, new FeatureMetaFieldMapper.TypeParser());
    }

    @Override
    public List<QuerySpec<?>> getQueries() {
        return Collections.singletonList(
                new QuerySpec<>(FeatureQueryBuilder.NAME, FeatureQueryBuilder::new, p -> FeatureQueryBuilder.PARSER.parse(p, null)));
    }

}
@ -0,0 +1,354 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.document.FeatureField;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.mapper.FeatureFieldMapper.FeatureFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Query to run on a [feature] field.
|
||||
*/
|
||||
public final class FeatureQueryBuilder extends AbstractQueryBuilder<FeatureQueryBuilder> {
|
||||
|
||||
/**
|
||||
* Scoring function for a [feature] field.
|
||||
*/
|
||||
public abstract static class ScoreFunction {
|
||||
|
||||
private ScoreFunction() {} // prevent extensions by users
|
||||
|
||||
abstract void writeTo(StreamOutput out) throws IOException;
|
||||
|
||||
abstract Query toQuery(String feature, boolean positiveScoreImpact) throws IOException;
|
||||
|
||||
abstract void doXContent(XContentBuilder builder) throws IOException;
|
||||
|
||||
/**
|
||||
* A scoring function that scores documents as {@code Math.log(scalingFactor + S)}
|
||||
* where S is the value of the static feature.
|
||||
*/
|
||||
public static class Log extends ScoreFunction {
|
||||
|
||||
private static final ConstructingObjectParser<Log, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"log", a -> new Log((Float) a[0]));
|
||||
static {
|
||||
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("scaling_factor"));
|
||||
}
|
||||
|
||||
private final float scalingFactor;
|
||||
|
||||
public Log(float scalingFactor) {
|
||||
this.scalingFactor = scalingFactor;
|
||||
}
|
||||
|
||||
private Log(StreamInput in) throws IOException {
|
||||
this(in.readFloat());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || obj.getClass() != getClass()) {
|
||||
return false;
|
||||
}
|
||||
Log that = (Log) obj;
|
||||
return scalingFactor == that.scalingFactor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Float.hashCode(scalingFactor);
|
||||
}
|
||||
|
||||
@Override
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte((byte) 0);
|
||||
out.writeFloat(scalingFactor);
|
||||
}
|
||||
|
||||
@Override
|
||||
void doXContent(XContentBuilder builder) throws IOException {
|
||||
builder.startObject("log");
|
||||
builder.field("scaling_factor", scalingFactor);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
|
||||
if (positiveScoreImpact == false) {
|
||||
throw new IllegalArgumentException("Cannot use the [log] function with a field that has a negative score impact as " +
|
||||
"it would trigger negative scores");
|
||||
}
|
||||
return FeatureField.newLogQuery("_feature", feature, DEFAULT_BOOST, scalingFactor);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A scoring function that scores documents as {@code S / (S + pivot)} where S is
|
||||
* the value of the static feature.
|
||||
*/
|
||||
public static class Saturation extends ScoreFunction {
|
||||
|
||||
private static final ConstructingObjectParser<Saturation, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"saturation", a -> new Saturation((Float) a[0]));
|
||||
static {
|
||||
PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), new ParseField("pivot"));
|
||||
}
|
||||
|
||||
private final Float pivot;
|
||||
|
||||
/** Constructor with a default pivot, computed as the geometric average of
|
||||
* all feature values in the index. */
|
||||
public Saturation() {
|
||||
this((Float) null);
|
||||
}
|
||||
|
||||
public Saturation(float pivot) {
|
||||
this(Float.valueOf(pivot));
|
||||
}
|
||||
|
||||
private Saturation(Float pivot) {
|
||||
this.pivot = pivot;
|
||||
}
|
||||
|
||||
private Saturation(StreamInput in) throws IOException {
|
||||
this(in.readOptionalFloat());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || obj.getClass() != getClass()) {
|
||||
return false;
|
||||
}
|
||||
Saturation that = (Saturation) obj;
|
||||
return Objects.equals(pivot, that.pivot);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hashCode(pivot);
|
||||
}
|
||||
|
||||
@Override
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte((byte) 1);
|
||||
out.writeOptionalFloat(pivot);
|
||||
}
|
||||
|
||||
@Override
|
||||
void doXContent(XContentBuilder builder) throws IOException {
|
||||
builder.startObject("saturation");
|
||||
if (pivot != null) {
|
||||
builder.field("pivot", pivot);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
|
||||
if (pivot == null) {
|
||||
return FeatureField.newSaturationQuery("_feature", feature);
|
||||
} else {
|
||||
return FeatureField.newSaturationQuery("_feature", feature, DEFAULT_BOOST, pivot);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A scoring function that scores documents as {@code S^exp / (S^exp + pivot^exp)}
|
||||
* where S is the value of the static feature.
|
||||
*/
|
||||
public static class Sigmoid extends ScoreFunction {
|
||||
|
||||
private static final ConstructingObjectParser<Sigmoid, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"sigmoid", a -> new Sigmoid((Float) a[0], ((Float) a[1]).floatValue()));
|
||||
static {
|
||||
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("pivot"));
|
||||
PARSER.declareFloat(ConstructingObjectParser.constructorArg(), new ParseField("exponent"));
|
||||
}
|
||||
|
||||
private final float pivot;
|
||||
private final float exp;
|
||||
|
||||
public Sigmoid(float pivot, float exp) {
|
||||
this.pivot = pivot;
|
||||
this.exp = exp;
|
||||
}
|
||||
|
||||
private Sigmoid(StreamInput in) throws IOException {
|
||||
this(in.readFloat(), in.readFloat());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null || obj.getClass() != getClass()) {
|
||||
return false;
|
||||
}
|
||||
Sigmoid that = (Sigmoid) obj;
|
||||
return pivot == that.pivot
|
||||
&& exp == that.exp;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(pivot, exp);
|
||||
}
|
||||
|
||||
@Override
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte((byte) 2);
|
||||
out.writeFloat(pivot);
|
||||
out.writeFloat(exp);
|
||||
}
|
||||
|
||||
@Override
|
||||
void doXContent(XContentBuilder builder) throws IOException {
|
||||
builder.startObject("sigmoid");
|
||||
builder.field("pivot", pivot);
|
||||
builder.field("exponent", exp);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
Query toQuery(String feature, boolean positiveScoreImpact) throws IOException {
|
||||
return FeatureField.newSigmoidQuery("_feature", feature, DEFAULT_BOOST, pivot, exp);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static ScoreFunction readScoreFunction(StreamInput in) throws IOException {
|
||||
byte b = in.readByte();
|
||||
switch (b) {
|
||||
case 0:
|
||||
return new ScoreFunction.Log(in);
|
||||
case 1:
|
||||
return new ScoreFunction.Saturation(in);
|
||||
case 2:
|
||||
return new ScoreFunction.Sigmoid(in);
|
||||
default:
|
||||
throw new IOException("Illegal score function id: " + b);
|
||||
}
|
||||
}
|
||||
|
||||
public static ConstructingObjectParser<FeatureQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"feature", args -> {
|
||||
final String field = (String) args[0];
|
||||
final float boost = args[1] == null ? DEFAULT_BOOST : (Float) args[1];
|
||||
final String queryName = (String) args[2];
|
||||
long numNonNulls = Arrays.stream(args, 3, args.length).filter(Objects::nonNull).count();
|
||||
final FeatureQueryBuilder query;
|
||||
if (numNonNulls > 1) {
|
||||
throw new IllegalArgumentException("Can only specify one of [log], [saturation] and [sigmoid]");
|
||||
} else if (numNonNulls == 0) {
|
||||
query = new FeatureQueryBuilder(field, new ScoreFunction.Saturation());
|
||||
} else {
|
||||
ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length)
|
||||
.filter(Objects::nonNull)
|
||||
.findAny()
|
||||
.get();
|
||||
query = new FeatureQueryBuilder(field, scoreFunction);
|
||||
}
|
||||
query.boost(boost);
|
||||
query.queryName(queryName);
|
||||
return query;
|
||||
});
|
||||
static {
|
||||
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("field"));
|
||||
PARSER.declareFloat(ConstructingObjectParser.optionalConstructorArg(), BOOST_FIELD);
|
||||
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NAME_FIELD);
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
ScoreFunction.Log.PARSER, new ParseField("log"));
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
ScoreFunction.Saturation.PARSER, new ParseField("saturation"));
|
||||
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(),
|
||||
ScoreFunction.Sigmoid.PARSER, new ParseField("sigmoid"));
|
||||
}
|
||||
|
||||
public static final String NAME = "feature";
|
||||
|
||||
private final String field;
|
||||
private final ScoreFunction scoreFunction;
|
||||
|
||||
public FeatureQueryBuilder(String field, ScoreFunction scoreFunction) {
|
||||
this.field = Objects.requireNonNull(field);
|
||||
this.scoreFunction = Objects.requireNonNull(scoreFunction);
|
||||
}
|
||||
|
||||
public FeatureQueryBuilder(StreamInput in) throws IOException {
|
||||
super(in);
|
||||
this.field = in.readString();
|
||||
this.scoreFunction = readScoreFunction(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeString(field);
|
||||
scoreFunction.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(getName());
|
||||
builder.field("field", field);
|
||||
scoreFunction.doXContent(builder);
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
final MappedFieldType ft = context.fieldMapper(field);
|
||||
if (ft == null) {
|
||||
return new MatchNoDocsQuery();
|
||||
}
|
||||
if (ft instanceof FeatureFieldType == false) {
|
||||
throw new IllegalArgumentException("[feature] query only works on [feature] fields, not [" + ft.typeName() + "]");
|
||||
}
|
||||
final FeatureFieldType fft = (FeatureFieldType) ft;
|
||||
return scoreFunction.toQuery(field, fft.positiveScoreImpact());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(FeatureQueryBuilder other) {
|
||||
return Objects.equals(field, other.field) && Objects.equals(scoreFunction, other.scoreFunction);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(field, scoreFunction);
|
||||
}
|
||||
|
||||
}
|
|
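The three ScoreFunction variants above turn a stored feature value S into a score contribution. A small worked example of the formulas from the javadocs, with purely illustrative constants:

// Worked example of the scoring formulas documented above for a feature value S = 10.
public class FeatureScoreFormulaExample {
    public static void main(String[] args) {
        double S = 10;
        double scalingFactor = 3;   // log
        double pivot = 20;          // saturation and sigmoid
        double exponent = 0.6;      // sigmoid

        double log = Math.log(scalingFactor + S);                      // ~2.565
        double saturation = S / (S + pivot);                           // ~0.333
        double sigmoid = Math.pow(S, exponent)
                / (Math.pow(S, exponent) + Math.pow(pivot, exponent)); // ~0.397
        System.out.printf("log=%.3f saturation=%.3f sigmoid=%.3f%n", log, saturation, sigmoid);
    }
}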
@ -0,0 +1,173 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.TermFrequencyAttribute;
|
||||
import org.apache.lucene.document.FeatureField;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
||||
public class FeatureFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
IndexService indexService;
|
||||
DocumentMapperParser parser;
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
indexService = createIndex("test");
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(MapperExtrasPlugin.class);
|
||||
}
|
||||
|
||||
private static int getFrequency(TokenStream tk) throws IOException {
|
||||
TermFrequencyAttribute freqAttribute = tk.addAttribute(TermFrequencyAttribute.class);
|
||||
tk.reset();
|
||||
assertTrue(tk.incrementToken());
|
||||
int freq = freqAttribute.getTermFrequency();
|
||||
assertFalse(tk.incrementToken());
|
||||
return freq;
|
||||
}
|
||||
|
||||
public void testDefaults() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
|
||||
.bytes(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 10)
|
||||
.endObject()),
|
||||
XContentType.JSON));
|
||||
|
||||
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
|
||||
assertEquals(1, fields.length);
|
||||
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
|
||||
FeatureField featureField1 = (FeatureField) fields[0];
|
||||
|
||||
ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
|
||||
.bytes(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 12)
|
||||
.endObject()),
|
||||
XContentType.JSON));
|
||||
|
||||
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
|
||||
|
||||
int freq1 = getFrequency(featureField1.tokenStream(null, null));
|
||||
int freq2 = getFrequency(featureField2.tokenStream(null, null));
|
||||
assertTrue(freq1 < freq2);
|
||||
}
|
||||
|
||||
public void testNegativeScoreImpact() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "feature")
|
||||
.field("positive_score_impact", false).endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
|
||||
ParsedDocument doc1 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
|
||||
.bytes(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 10)
|
||||
.endObject()),
|
||||
XContentType.JSON));
|
||||
|
||||
IndexableField[] fields = doc1.rootDoc().getFields("_feature");
|
||||
assertEquals(1, fields.length);
|
||||
assertThat(fields[0], Matchers.instanceOf(FeatureField.class));
|
||||
FeatureField featureField1 = (FeatureField) fields[0];
|
||||
|
||||
ParsedDocument doc2 = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
|
||||
.bytes(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("field", 12)
|
||||
.endObject()),
|
||||
XContentType.JSON));
|
||||
|
||||
FeatureField featureField2 = (FeatureField) doc2.rootDoc().getFields("_feature")[0];
|
||||
|
||||
int freq1 = getFrequency(featureField1.tokenStream(null, null));
|
||||
int freq2 = getFrequency(featureField2.tokenStream(null, null));
|
||||
assertTrue(freq1 > freq2);
|
||||
}
|
||||
|
||||
    public void testRejectMultiValuedFields() throws MapperParsingException, IOException {
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("field").field("type", "feature").endObject().startObject("foo")
                .startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
                .endObject().endObject().endObject().endObject());

        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));

        assertEquals(mapping, mapper.mappingSource().toString());

        MapperParsingException e = null;
        /*expectThrows(MapperParsingException.class,
                () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                        .bytes(XContentFactory.jsonBuilder()
                                .startObject()
                                .field("field", Arrays.asList(10, 20))
                                .endObject()),
                        XContentType.JSON)));
        assertEquals("[feature] fields do not support indexing multiple values for the same field [field] in the same document",
                e.getCause().getMessage());*/

        e = expectThrows(MapperParsingException.class,
                () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
                        .bytes(XContentFactory.jsonBuilder()
                                .startObject()
                                .startArray("foo")
                                .startObject()
                                .field("field", 10)
                                .endObject()
                                .startObject()
                                .field("field", 20)
                                .endObject()
                                .endArray()
                                .endObject()),
                        XContentType.JSON)));
        assertEquals("[feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
                e.getCause().getMessage());
    }
}
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.junit.Before;
|
||||
|
||||
public class FeatureFieldTypeTests extends FieldTypeTestCase {
|
||||
|
||||
@Override
|
||||
protected MappedFieldType createDefaultFieldType() {
|
||||
return new FeatureFieldMapper.FeatureFieldType();
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setupProperties() {
|
||||
addModifier(new Modifier("positive_score_impact", false) {
|
||||
@Override
|
||||
public void modify(MappedFieldType ft) {
|
||||
FeatureFieldMapper.FeatureFieldType tft = (FeatureFieldMapper.FeatureFieldType)ft;
|
||||
tft.setPositiveScoreImpact(tft.positiveScoreImpact() == false);
|
||||
}
|
||||
@Override
|
||||
public void normalizeOther(MappedFieldType other) {
|
||||
super.normalizeOther(other);
|
||||
((FeatureFieldMapper.FeatureFieldType) other).setPositiveScoreImpact(true);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
|
@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
public class FeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
|
||||
|
||||
IndexService indexService;
|
||||
DocumentMapperParser parser;
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
indexService = createIndex("test");
|
||||
parser = indexService.mapperService().documentMapperParser();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return pluginList(MapperExtrasPlugin.class);
|
||||
}
|
||||
|
||||
public void testBasics() throws Exception {
|
||||
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
|
||||
.startObject("properties").startObject("field").field("type", "feature").endObject().endObject()
|
||||
.endObject().endObject());
|
||||
|
||||
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
|
||||
|
||||
assertEquals(mapping, mapper.mappingSource().toString());
|
||||
assertNotNull(mapper.metadataMapper(FeatureMetaFieldMapper.class));
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,29 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

public class FeatureMetaFieldTypeTests extends FieldTypeTestCase {

    @Override
    protected MappedFieldType createDefaultFieldType() {
        return new FeatureMetaFieldMapper.FeatureMetaFieldType();
    }

}
@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.document.FeatureField;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.mapper.MapperExtrasPlugin;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.FeatureQueryBuilder.ScoreFunction;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.AbstractQueryTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.either;
|
||||
|
||||
public class FeatureQueryBuilderTests extends AbstractQueryTestCase<FeatureQueryBuilder> {
|
||||
|
||||
@Override
|
||||
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
|
||||
for (String type : getCurrentTypes()) {
|
||||
mapperService.merge(type, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(type,
|
||||
"my_feature_field", "type=feature",
|
||||
"my_negative_feature_field", "type=feature,positive_score_impact=false"))), MapperService.MergeReason.MAPPING_UPDATE);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> getPlugins() {
|
||||
return Collections.singleton(MapperExtrasPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected FeatureQueryBuilder doCreateTestQueryBuilder() {
|
||||
ScoreFunction function;
|
||||
switch (random().nextInt(3)) {
|
||||
case 0:
|
||||
function = new ScoreFunction.Log(1 + randomFloat());
|
||||
break;
|
||||
case 1:
|
||||
if (randomBoolean()) {
|
||||
function = new ScoreFunction.Saturation();
|
||||
} else {
|
||||
function = new ScoreFunction.Saturation(randomFloat());
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
function = new ScoreFunction.Sigmoid(randomFloat(), randomFloat());
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
return new FeatureQueryBuilder("my_feature_field", function);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(FeatureQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
|
||||
Class<?> expectedClass = FeatureField.newSaturationQuery("", "", 1, 1).getClass();
|
||||
assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass)));
|
||||
}
|
||||
|
||||
@Override
|
||||
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/30605")
|
||||
public void testUnknownField() {
|
||||
super.testUnknownField();
|
||||
}
|
||||
|
||||
public void testDefaultScoreFunction() throws IOException {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
String query = "{\n" +
|
||||
" \"feature\" : {\n" +
|
||||
" \"field\": \"my_feature_field\"\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
|
||||
assertEquals(FeatureField.newSaturationQuery("_feature", "my_feature_field"), parsedQuery);
|
||||
}
|
||||
|
||||
public void testIllegalField() throws IOException {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
String query = "{\n" +
|
||||
" \"feature\" : {\n" +
|
||||
" \"field\": \"" + STRING_FIELD_NAME + "\"\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(query).toQuery(createShardContext()));
|
||||
assertEquals("[feature] query only works on [feature] fields, not [text]", e.getMessage());
|
||||
}
|
||||
|
||||
public void testIllegalCombination() throws IOException {
|
||||
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
|
||||
String query = "{\n" +
|
||||
" \"feature\" : {\n" +
|
||||
" \"field\": \"my_negative_feature_field\",\n" +
|
||||
" \"log\" : {\n" +
|
||||
" \"scaling_factor\": 4.5\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(query).toQuery(createShardContext()));
|
||||
assertEquals(
|
||||
"Cannot use the [log] function with a field that has a negative score impact as it would trigger negative scores",
|
||||
e.getMessage());
|
||||
}
|
||||
}
|
|
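For reference, a minimal sketch of building the same query programmatically with the builder introduced in this change; the "pagerank" field name and the constants are illustrative only.

import org.elasticsearch.index.query.FeatureQueryBuilder;
import org.elasticsearch.index.query.FeatureQueryBuilder.ScoreFunction;

// Illustrative sketch: score hits by log(5 + pagerank), doubling the contribution.
public class FeatureQuerySketch {
    static FeatureQueryBuilder pagerankQuery() {
        FeatureQueryBuilder query = new FeatureQueryBuilder("pagerank", new ScoreFunction.Log(5f));
        query.boost(2.0f);
        return query;
    }
}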
@@ -0,0 +1,160 @@
setup:
  - skip:
      version: " - 6.99.99"
      reason: "The feature field/query was introduced in 7.0.0"

  - do:
      indices.create:
        index: test
        body:
          settings:
            number_of_replicas: 0
          mappings:
            _doc:
              properties:
                pagerank:
                  type: feature
                url_length:
                  type: feature
                  positive_score_impact: false

  - do:
      index:
        index: test
        type: _doc
        id: 1
        body:
          pagerank: 10
          url_length: 50

  - do:
      index:
        index: test
        type: _doc
        id: 2
        body:
          pagerank: 100
          url_length: 20

  - do:
      indices.refresh: {}

---
"Positive log":

  - do:
      search:
        body:
          query:
            feature:
              field: pagerank
              log:
                scaling_factor: 3

  - match:
      hits.total: 2

  - match:
      hits.hits.0._id: "2"

  - match:
      hits.hits.1._id: "1"

---
"Positive saturation":

  - do:
      search:
        body:
          query:
            feature:
              field: pagerank
              saturation:
                pivot: 20

  - match:
      hits.total: 2

  - match:
      hits.hits.0._id: "2"

  - match:
      hits.hits.1._id: "1"

---
"Positive sigmoid":

  - do:
      search:
        body:
          query:
            feature:
              field: pagerank
              sigmoid:
                pivot: 20
                exponent: 0.6

  - match:
      hits.total: 2

  - match:
      hits.hits.0._id: "2"

  - match:
      hits.hits.1._id: "1"

---
"Negative log":

  - do:
      catch: bad_request
      search:
        body:
          query:
            feature:
              field: url_length
              log:
                scaling_factor: 3

---
"Negative saturation":

  - do:
      search:
        body:
          query:
            feature:
              field: url_length
              saturation:
                pivot: 20

  - match:
      hits.total: 2

  - match:
      hits.hits.0._id: "2"

  - match:
      hits.hits.1._id: "1"

---
"Negative sigmoid":

  - do:
      search:
        body:
          query:
            feature:
              field: url_length
              sigmoid:
                pivot: 20
                exponent: 0.6

  - match:
      hits.total: 2

  - match:
      hits.hits.0._id: "2"

  - match:
      hits.hits.1._id: "1"
@@ -1 +0,0 @@
473a7f4d955f132bb498482648266653f8da85bd
@@ -0,0 +1 @@
452c9a9f86b79b9b3eaa7d6aa782e189d5bcfe8f
@@ -1 +0,0 @@
c5a72b9a790e2552248c8bbb36af47c4c399ba27
@@ -0,0 +1 @@
48c76a922bdfc7f50b1b6fe22e9456c555f3f990
@@ -1 +0,0 @@
14f680ab9b886c7c5224ff682a7fa70b6df44a05
@@ -0,0 +1 @@
4db5777df468b0867ff6539c9ab687e0ed6cab41
@@ -1 +0,0 @@
e033c68c9ec1ba9cd8439758adf7eb5fee22acef
@@ -0,0 +1 @@
0e09e6b011ab2b1a0e3e0e1df2ab2a91dca8ba23
@@ -1 +0,0 @@
08df0a5029f11c109b22064dec78c05dfa25f9e3
@@ -0,0 +1 @@
ceefa0f9789ab9ea5c8ab9f67ed7a601a3ae6aa9
@@ -1 +0,0 @@
a9d1819b2b13f134f6a605ab5a59ce3c602c0460
@@ -0,0 +1 @@
b013adc183e52a74795ad3d3032f4d0f9db30b73
@@ -1 +0,0 @@
47bc91ccb0cdf0c1c404646ffe0d5fd6b020a4ab
@@ -0,0 +1 @@
95300f29418f60e57e022d934d3462be9e1e2225
@ -18,33 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.upgrades;
|
||||
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.ObjectPath;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
|
||||
import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING;
|
||||
import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
|
||||
public abstract class AbstractRollingTestCase extends ESRestTestCase {
|
||||
protected enum ClusterType {
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.client.Response;
|
|||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.test.rest.ESRestTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.ObjectPath;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -0,0 +1,111 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.upgrades;
|
||||
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.elasticsearch.common.Booleans;
|
||||
import org.junit.Before;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.Response;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.junit.Assume.assumeThat;
|
||||
|
||||
/**
|
||||
* Basic tests for simple xpack functionality that are only run if the
|
||||
* cluster is on the "zip" distribution.
|
||||
*/
|
||||
public class XPackIT extends AbstractRollingTestCase {
|
||||
@Before
|
||||
public void skipIfNotXPack() {
|
||||
assumeThat("test is only supported if the distribution contains xpack",
|
||||
System.getProperty("tests.distribution"), equalTo("zip"));
|
||||
assumeThat("running this on the unupgraded cluster would change its state and it wouldn't work prior to 6.3 anyway",
|
||||
CLUSTER_TYPE, equalTo(ClusterType.UPGRADED));
|
||||
/*
|
||||
* *Mostly* we want this for when we're upgrading from pre-6.3's
|
||||
* zip distribution which doesn't contain xpack to post 6.3's zip
|
||||
* distribution which *does* contain xpack. But we'll also run it
|
||||
* on all upgrades for completeness's sake.
|
||||
*/
|
||||
}
|
||||
|
||||
/**
|
||||
* Test a basic feature (SQL) which doesn't require any trial license.
|
||||
* Note that the test methods on this class can run in any order so we
|
||||
* <strong>might</strong> have already installed a trial license.
|
||||
*/
|
||||
public void testBasicFeature() throws IOException {
|
||||
Request bulk = new Request("POST", "/sql_test/doc/_bulk");
|
||||
bulk.setJsonEntity(
|
||||
"{\"index\":{}}\n"
|
||||
+ "{\"f\": \"1\"}\n"
|
||||
+ "{\"index\":{}}\n"
|
||||
+ "{\"f\": \"2\"}\n");
|
||||
bulk.addParameter("refresh", "true");
|
||||
client().performRequest(bulk);
|
||||
|
||||
Request sql = new Request("POST", "/_xpack/sql");
|
||||
sql.setJsonEntity("{\"query\": \"SELECT * FROM sql_test WHERE f > 1 ORDER BY f ASC\"}");
|
||||
String response = EntityUtils.toString(client().performRequest(sql).getEntity());
|
||||
assertEquals("{\"columns\":[{\"name\":\"f\",\"type\":\"text\"}],\"rows\":[[\"2\"]]}", response);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test creating a trial license and using it. This is interesting because
|
||||
* our other tests test cover starting a new cluster with the default
|
||||
* distribution and enabling the trial license but this test is the only
|
||||
* one that can upgrade from the oss distribution to the default
|
||||
* distribution with xpack and the create a trial license. We don't
|
||||
* <strong>do</strong> a lot with the trial license because for the most
|
||||
* part those things are tested elsewhere, off in xpack. But we do use the
|
||||
* trial license a little bit to make sure that it works.
|
||||
*/
|
||||
public void testTrialLicense() throws IOException {
|
||||
Request startTrial = new Request("POST", "/_xpack/license/start_trial");
|
||||
startTrial.addParameter("acknowledge", "true");
|
||||
client().performRequest(startTrial);
|
||||
|
||||
String noJobs = EntityUtils.toString(
|
||||
client().performRequest(new Request("GET", "/_xpack/ml/anomaly_detectors")).getEntity());
|
||||
assertEquals("{\"count\":0,\"jobs\":[]}", noJobs);
|
||||
|
||||
Request createJob = new Request("PUT", "/_xpack/ml/anomaly_detectors/test_job");
|
||||
createJob.setJsonEntity(
|
||||
"{\n"
|
||||
+ " \"analysis_config\" : {\n"
|
||||
+ " \"bucket_span\": \"10m\",\n"
|
||||
+ " \"detectors\": [\n"
|
||||
+ " {\n"
|
||||
+ " \"function\": \"sum\",\n"
|
||||
+ " \"field_name\": \"total\"\n"
|
||||
+ " }\n"
|
||||
+ " ]\n"
|
||||
+ " },\n"
|
||||
+ " \"data_description\": {\n"
|
||||
+ " \"time_field\": \"timestamp\",\n"
|
||||
+ " \"time_format\": \"epoch_ms\"\n"
|
||||
+ " }\n"
|
||||
+ "}\n");
|
||||
client().performRequest(createJob);
|
||||
}
|
||||
}
|
|
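XPackIT drives everything through the low-level REST client shown above. As a hedged aside (not part of this commit), the license installed by the start_trial call can be spot-checked against a running 6.x cluster with the same client and the standard license endpoint:

    // Sketch only, assuming an existing RestClient named client; EntityUtils as imported above.
    Request getLicense = new Request("GET", "/_xpack/license");
    String license = EntityUtils.toString(client.performRequest(getLicense).getEntity());
    // After start_trial?acknowledge=true the response body should report a "trial" license type.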
@@ -19,7 +19,6 @@

package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
@@ -222,8 +221,8 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
    public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws IOException {
        final String IRRELEVANT_HEADER = "SomeIrrelevantHeader";
        Request request = new Request("GET", "/" + queryIndex + "/_search");
        request.setHeaders(new BasicHeader(CUSTOM_HEADER, randomHeaderValue),
            new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue));
        request.addHeader(CUSTOM_HEADER, randomHeaderValue);
        request.addHeader(IRRELEVANT_HEADER, randomHeaderValue);
        Response response = getRestClient().performRequest(request);
        assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
        List<RequestAndHeaders> searchRequests = getRequests(SearchRequest.class);
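The hunk above, like the ones that follow, replaces the removed Request.setHeaders(Header...) call with per-header Request.addHeader(String, String) calls. A minimal sketch of the new pattern, assuming a RestClient instance named client and placeholder endpoint and header values (none of which come from this commit):

    // Sketch only: register headers one at a time instead of a single setHeaders(...) call.
    Request request = new Request("GET", "/");
    request.addHeader("X-Custom-Header", "some-value");
    request.addHeader("User-Agent", "my-test-client");
    Response response = client.performRequest(request);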
@@ -19,7 +19,6 @@

package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;

@@ -33,7 +32,8 @@ public class CorsNotSetIT extends HttpSmokeTestCase {
    public void testCorsSettingDefaultBehaviourDoesNotReturnAnything() throws IOException {
        String corsValue = "http://localhost:9200";
        Request request = new Request("GET", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue));
        request.addHeader("User-Agent", "Mozilla Bar");
        request.addHeader("Origin", corsValue);
        Response response = getRestClient().performRequest(request);
        assertThat(response.getStatusLine().getStatusCode(), is(200));
        assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue());
@@ -18,7 +18,6 @@
 */
package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
@@ -53,25 +52,29 @@ public class CorsRegexIT extends HttpSmokeTestCase {
    }

    public void testThatRegularExpressionWorksOnMatch() throws IOException {
        String corsValue = "http://localhost:9200";
        Request request = new Request("GET", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"),
            new BasicHeader("Origin", corsValue));
        Response response = getRestClient().performRequest(request);
        assertResponseWithOriginheader(response, corsValue);

        corsValue = "https://localhost:9201";
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"),
            new BasicHeader("Origin", corsValue));
        response = getRestClient().performRequest(request);
        assertResponseWithOriginheader(response, corsValue);
        assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true"));
        {
            String corsValue = "http://localhost:9200";
            Request request = new Request("GET", "/");
            request.addHeader("User-Agent", "Mozilla Bar");
            request.addHeader("Origin", corsValue);
            Response response = getRestClient().performRequest(request);
            assertResponseWithOriginHeader(response, corsValue);
        }
        {
            String corsValue = "https://localhost:9201";
            Request request = new Request("GET", "/");
            request.addHeader("User-Agent", "Mozilla Bar");
            request.addHeader("Origin", corsValue);
            Response response = getRestClient().performRequest(request);
            assertResponseWithOriginHeader(response, corsValue);
            assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true"));
        }
    }

    public void testThatRegularExpressionReturnsForbiddenOnNonMatch() throws IOException {
        Request request = new Request("GET", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"),
            new BasicHeader("Origin", "http://evil-host:9200"));
        request.addHeader("User-Agent", "Mozilla Bar");
        request.addHeader("Origin", "http://evil-host:9200");
        try {
            getRestClient().performRequest(request);
            fail("request should have failed");
@@ -85,7 +88,7 @@ public class CorsRegexIT extends HttpSmokeTestCase {

    public void testThatSendingNoOriginHeaderReturnsNoAccessControlHeader() throws IOException {
        Request request = new Request("GET", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"));
        request.addHeader("User-Agent", "Mozilla Bar");
        Response response = getRestClient().performRequest(request);
        assertThat(response.getStatusLine().getStatusCode(), is(200));
        assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue());
@@ -100,20 +103,20 @@ public class CorsRegexIT extends HttpSmokeTestCase {
    public void testThatPreFlightRequestWorksOnMatch() throws IOException {
        String corsValue = "http://localhost:9200";
        Request request = new Request("OPTIONS", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"),
            new BasicHeader("Origin", corsValue),
            new BasicHeader("Access-Control-Request-Method", "GET"));
        request.addHeader("User-Agent", "Mozilla Bar");
        request.addHeader("Origin", corsValue);
        request.addHeader("Access-Control-Request-Method", "GET");
        Response response = getRestClient().performRequest(request);
        assertResponseWithOriginheader(response, corsValue);
        assertResponseWithOriginHeader(response, corsValue);
        assertNotNull(response.getHeader("Access-Control-Allow-Methods"));
    }

    public void testThatPreFlightRequestReturnsNullOnNonMatch() throws IOException {
        String corsValue = "http://evil-host:9200";
        Request request = new Request("OPTIONS", "/");
        request.setHeaders(new BasicHeader("User-Agent", "Mozilla Bar"),
            new BasicHeader("Origin", corsValue),
            new BasicHeader("Access-Control-Request-Method", "GET"));
        request.addHeader("User-Agent", "Mozilla Bar");
        request.addHeader("Origin", corsValue);
        request.addHeader("Access-Control-Request-Method", "GET");
        try {
            getRestClient().performRequest(request);
            fail("request should have failed");
@@ -126,7 +129,7 @@ public class CorsRegexIT extends HttpSmokeTestCase {
        }
    }

    protected static void assertResponseWithOriginheader(Response response, String expectedCorsHeader) {
    private static void assertResponseWithOriginHeader(Response response, String expectedCorsHeader) {
        assertThat(response.getStatusLine().getStatusCode(), is(200));
        assertThat(response.getHeader("Access-Control-Allow-Origin"), is(expectedCorsHeader));
    }
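The CorsRegexIT hunks above are mechanical: the addHeader migration plus the rename of assertResponseWithOriginheader to assertResponseWithOriginHeader. The regex-matching CORS behaviour they exercise comes from node settings configured elsewhere in the test class. As a hedged illustration only (the values below are assumptions, not taken from this commit), a regex-based CORS setup looks roughly like:

    // Sketch of CORS node settings of the kind these tests exercise;
    // uses org.elasticsearch.common.settings.Settings (imported in an earlier hunk).
    Settings corsSettings = Settings.builder()
        .put("http.cors.enabled", true)
        .put("http.cors.allow-origin", "/https?:\\/\\/localhost(:[0-9]+)?/")
        .put("http.cors.allow-credentials", true)
        .build();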
@@ -19,7 +19,6 @@
package org.elasticsearch.http;

import org.apache.http.HttpHeaders;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.test.rest.ESRestTestCase;
@@ -39,7 +38,7 @@ public class HttpCompressionIT extends ESRestTestCase {

    public void testCompressesResponseIfRequested() throws IOException {
        Request request = new Request("GET", "/");
        request.setHeaders(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING));
        request.addHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING);
        Response response = client().performRequest(request);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals(GZIP_ENCODING, response.getHeader(HttpHeaders.CONTENT_ENCODING));
@@ -19,7 +19,6 @@

package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
@@ -47,7 +46,7 @@ public class NoHandlerIT extends HttpSmokeTestCase {
    private void runTestNoHandlerRespectsAcceptHeader(
        final String accept, final String contentType, final String expect) throws IOException {
        Request request = new Request("GET", "/foo/bar/baz/qux/quux");
        request.setHeaders(new BasicHeader("Accept", accept));
        request.addHeader("Accept", accept);
        final ResponseException e = expectThrows(ResponseException.class,
            () -> getRestClient().performRequest(request));

@@ -18,7 +18,6 @@
 */
package org.elasticsearch.http;

import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
@@ -26,8 +25,8 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;

import java.util.ArrayList;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;

import static org.hamcrest.Matchers.equalTo;
@@ -62,7 +61,7 @@ public class ResponseHeaderPluginIT extends HttpSmokeTestCase {
        }

        Request request = new Request("GET", "/_protected");
        request.setHeaders(new BasicHeader("Secret", "password"));
        request.addHeader("Secret", "password");
        Response authResponse = getRestClient().performRequest(request);
        assertThat(authResponse.getStatusLine().getStatusCode(), equalTo(200));
        assertThat(authResponse.getHeader("Secret"), equalTo("granted"));
@@ -20,6 +20,10 @@ setup:
---
"Get all aliases via /_alias":

  - do:
      indices.create:
        index: test_index_3

  - do:
      indices.get_alias: {}

@@ -27,7 +31,41 @@ setup:
  - match: {test_index.aliases.test_blias: {}}
  - match: {test_index_2.aliases.test_alias: {}}
  - match: {test_index_2.aliases.test_blias: {}}
  - match: {test_index_3.aliases: {}}

---
"Get aliases via /_alias/_all":

  - do:
      indices.create:
        index: test_index_3

  - do:
      indices.get_alias:
        name: _all

  - match: {test_index.aliases.test_alias: {}}
  - match: {test_index.aliases.test_blias: {}}
  - match: {test_index_2.aliases.test_alias: {}}
  - match: {test_index_2.aliases.test_blias: {}}
  - is_false: test_index_3

---
"Get aliases via /_alias/*":

  - do:
      indices.create:
        index: test_index_3

  - do:
      indices.get_alias:
        name: _all

  - match: {test_index.aliases.test_alias: {}}
  - match: {test_index.aliases.test_blias: {}}
  - match: {test_index_2.aliases.test_alias: {}}
  - match: {test_index_2.aliases.test_blias: {}}
  - is_false: test_index_3

---
"Get all aliases via /{index}/_alias/":
@@ -45,9 +45,8 @@ setup:
"Nested doc version and seqIDs":

  - skip:
      # fixed in 6.0.1
      version: " - 6.0.0"
      reason: "version and seq IDs where not accurate in previous versions"
      version: " - 6.3.99"
      reason: "object notation for docvalue_fields was introduced in 6.4"

  - do:
      index:
@@ -61,7 +60,7 @@ setup:

  - do:
      search:
        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] }
        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }

  - match: { hits.total: 1 }
  - match: { hits.hits.0._index: "test" }
@@ -84,7 +83,7 @@ setup:

  - do:
      search:
        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": ["_seq_no"]} }}, "version": true, "docvalue_fields" : ["_seq_no"] }
        body: { "query" : { "nested" : { "path" : "nested_field", "query" : { "match_all" : {} }, "inner_hits" : { version: true, "docvalue_fields": [ { "field": "_seq_no", "format": "use_field_mapping" } ]} }}, "version": true, "docvalue_fields" : [ { "field": "_seq_no", "format": "use_field_mapping" } ] }

  - match: { hits.total: 1 }
  - match: { hits.hits.0._index: "test" }
@@ -133,7 +133,53 @@ setup:

---
"docvalue_fields":
  - skip:
      version: " - 6.3.99"
      reason: format option was added in 6.4
      features: warnings
  - do:
      warnings:
        - 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
      search:
        body:
          docvalue_fields: [ "count" ]
  - match: { hits.hits.0.fields.count: [1] }

---
"docvalue_fields as url param":
  - skip:
      version: " - 6.3.99"
      reason: format option was added in 6.4
      features: warnings
  - do:
      warnings:
        - 'Doc-value field [count] is not using a format. The output will change in 7.0 when doc value fields get formatted based on mappings by default. It is recommended to pass [format=use_field_mapping] with the doc value field in order to opt in for the future behaviour and ease the migration to 7.0.'
      search:
        docvalue_fields: [ "count" ]
  - match: { hits.hits.0.fields.count: [1] }

---
"docvalue_fields with default format":
  - skip:
      version: " - 6.3.99"
      reason: format option was added in 6.4
  - do:
      search:
        body:
          docvalue_fields:
            - field: "count"
              format: "use_field_mapping"
  - match: { hits.hits.0.fields.count: [1] }

---
"docvalue_fields with explicit format":
  - skip:
      version: " - 6.3.99"
      reason: format option was added in 6.4
  - do:
      search:
        body:
          docvalue_fields:
            - field: "count"
              format: "#.0"
  - match: { hits.hits.0.fields.count: ["1.0"] }

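The YAML changes above move docvalue_fields from plain field names to the object notation with an explicit format, which is what avoids the deprecation warning asserted in the first two tests. A hedged sketch of the same request issued through the low-level Java client (the index name and the client variable are placeholders, not part of this commit):

    // Sketch only: the docvalue_fields object notation introduced in 6.4.
    Request search = new Request("GET", "/test/_search");
    search.setJsonEntity(
        "{\n"
        + "  \"docvalue_fields\": [\n"
        + "    { \"field\": \"count\", \"format\": \"use_field_mapping\" }\n"
        + "  ]\n"
        + "}");
    Response response = client.performRequest(search);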
@@ -62,6 +62,9 @@ setup:
---
"Docvalues_fields size limit":

  - skip:
      version: " - 6.3.99"
      reason: "The object notation for docvalue_fields is only supported on 6.4+"
  - do:
      catch: /Trying to retrieve too many docvalue_fields\. Must be less than or equal to[:] \[2\] but was \[3\]\. This limit can be set by changing the \[index.max_docvalue_fields_search\] index level setting\./
      search:
@@ -69,7 +72,13 @@ setup:
        body:
          query:
            match_all: {}
          docvalue_fields: ["one", "two", "three"]
          docvalue_fields:
            - field: "one"
              format: "use_field_mapping"
            - field: "two"
              format: "use_field_mapping"
            - field: "three"
              format: "use_field_mapping"

---
"Script_fields size limit":
@@ -51,6 +51,9 @@ setup:

---
"Verify created repository":
  - skip:
      version: " - 6.99.99"
      reason: AwaitsFix for https://github.com/elastic/elasticsearch/issues/30807
  - do:
      snapshot.verify_repository:
        repository: test_repo_get_2
@@ -1 +0,0 @@
b70d03784d06a643e096fae4d959200aa246ba16
@@ -0,0 +1 @@
96ab108569c77932ecb17c45421affece207df5c
@@ -1 +0,0 @@
d660a63ac0f7ab2772a45ae518518472bf620620
@@ -0,0 +1 @@
72d09ca50979f716a57f53f2de33d55023a166ec
@@ -1 +0,0 @@
bf8f9e8284a54af18545574cb4a530da0deb968a
@@ -0,0 +1 @@
e118e4d05070378516b9055184b74498ba528dee
@@ -1 +0,0 @@
9eaae9dcd4ec88227475cb81d3be9afa767f1b22
@@ -0,0 +1 @@
2b2ea6bfe6fa159bbf205bf7f7fa2ed2c22bbffc
@@ -1 +0,0 @@
cd15f0008742c84899d678cb0cecda06d0a6d63e
@@ -0,0 +1 @@
423e4fff9276101d845d6073dc6cd27504def207
@@ -1 +0,0 @@
5ce38b8610a7f402f2da3b0e408e508151d979c5
@@ -0,0 +1 @@
27561038da2edcae3ecc3a08b0a52824966af87a
@@ -1 +0,0 @@
53819f03a07050a4af28361d64395c86f2cea008
@@ -0,0 +1 @@
d7d422159f705261784d121e24877119d9c95083
@@ -1 +0,0 @@
8cdc0e2b65d146ed11f4d2507109e530d59ff33d
@@ -0,0 +1 @@
fc09508fde6ba87f241d7e3148d9e310c0db9cb9
@@ -1 +0,0 @@
e56090463703112ad64ad457d18bae9a5b2966b8
@@ -0,0 +1 @@
201fdf3432ff3fef0f48c38c2c0f482c144f6868
@@ -1 +0,0 @@
9faf974b77058e44a6d35e956db4f5fb67389dfa
@@ -0,0 +1 @@
917df8c8d08952a012a34050b183b6204ae7081b
@@ -1 +0,0 @@
b852b1fe70ef70736b2b1a9ad57eb93cbaed0423
@@ -0,0 +1 @@
caff84fa66cb0376835c39f3d4ca7dfd2177d8f4
@@ -1 +0,0 @@
d2fa99ec7140fcf35db16ac1feb78ef142750d39
@@ -0,0 +1 @@
e1bce61a9d9129a8d0fdd3127a84665d29f53eb0
@@ -1 +0,0 @@
c9963f60d3a0924b877a6f910650c5f2384822a0
@@ -0,0 +1 @@
3a2e4373d79fda968a078971efa2cb8ec9ff65b0
@@ -1 +0,0 @@
3f33ba54da5e0e125f4c5ef7dd800dd6185e4f61
@@ -0,0 +1 @@
7f14927e5c3c1c85c4c5b3681c28c5e36f241dda
@@ -1 +0,0 @@
bb3c18c987395dae6fe63744f5a50fd367ea5a74
Some files were not shown because too many files have changed in this diff.