diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 63133dd851b..6432ccb3ddd 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -1197,7 +1197,6 @@
-
diff --git a/buildSrc/src/main/resources/forbidden/es-test-signatures.txt b/buildSrc/src/main/resources/forbidden/es-test-signatures.txt
index bd6744ee05f..08e591e1cfa 100644
--- a/buildSrc/src/main/resources/forbidden/es-test-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/es-test-signatures.txt
@@ -21,5 +21,7 @@ com.carrotsearch.randomizedtesting.annotations.Repeat @ Don't commit hardcoded r
org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead
org.apache.lucene.util.LuceneTestCase$Slow @ Don't write slow tests
org.junit.Ignore @ Use AwaitsFix instead
+org.apache.lucene.util.LuceneTestCase$Nightly @ We don't run nightly tests at this point!
+com.carrotsearch.randomizedtesting.annotations.Nightly @ We don't run nightly tests at this point!
org.junit.Test @defaultMessage Just name your test method testFooBar
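
The two new entries extend the existing ban list: forbidden-apis now rejects both nightly annotations at build time, alongside the existing ban on @Ignore. For reference, a sketch of the accepted pattern for parking a test, assuming the usual LuceneTestCase machinery (the issue URL and test body are hypothetical):

    import org.apache.lucene.util.LuceneTestCase.AwaitsFix;

    public class MyTests extends ESTestCase {
        // @Nightly          <- now rejected: "We don't run nightly tests at this point!"
        // @org.junit.Ignore <- already rejected: "Use AwaitsFix instead"
        @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/0000")
        public void testFooBar() {
            // known-broken test, skipped until the linked issue is resolved
        }
    }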
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
index 39d6bf767e1..2b72ef8b31b 100644
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
@@ -176,12 +176,10 @@ public abstract class TransportNodesAction
-            ImmutableOpenMap<String, DiscoveryNode> nodes = clusterState.nodes().getNodes();
+ nodesIds = filterNodeIds(clusterState.nodes(), resolveNodes(request, clusterState));
this.nodes = new DiscoveryNode[nodesIds.length];
for (int i = 0; i < nodesIds.length; i++) {
- this.nodes[i] = nodes.get(nodesIds[i]);
+ this.nodes[i] = clusterState.nodes().get(nodesIds[i]);
}
this.responses = new AtomicReferenceArray<>(this.nodesIds.length);
}
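
This hunk is the core of the change: the ids returned by resolveNodes() now pass through a filterNodeIds() hook before the per-node fan-out, so subclasses can restrict which nodes receive requests. A minimal sketch of an override, mirroring the data-node filter used by the FilteringTestTransportNodesAction in the tests below (Arrays is java.util.Arrays; the default implementation presumably returns the ids unchanged):

    @Override
    protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
        // fan out only to data nodes, dropping ids the cluster state no longer knows
        return Arrays.stream(nodesIds)
                .filter(id -> nodes.get(id) != null && nodes.get(id).isDataNode())
                .toArray(String[]::new);
    }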
diff --git a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
index 925940a3903..0ee5099dc69 100644
--- a/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
+++ b/core/src/main/java/org/elasticsearch/bootstrap/BootstrapCheck.java
@@ -20,7 +20,7 @@
package org.elasticsearch.bootstrap;
import org.apache.lucene.util.Constants;
-import org.apache.lucene.util.SuppressForbidden;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
index a195ab450b5..674522589ae 100644
--- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
+++ b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java
@@ -19,8 +19,8 @@
package org.elasticsearch.common.logging;
-import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.common.Classes;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
index 6b1d24e47f1..188e3608cb3 100644
--- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -133,7 +133,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
private final TimeValue shardsClosedTimeout;
private final AnalysisRegistry analysisRegistry;
private final IndicesQueriesRegistry indicesQueriesRegistry;
- private final ClusterService clusterService;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final IndexScopedSettings indexScopeSetting;
private final IndicesFieldDataCache indicesFieldDataCache;
@@ -162,7 +161,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv,
ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry,
IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver,
- ClusterService clusterService, MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry,
+ MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry,
ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService,
MetaStateService metaStateService) {
super(settings);
@@ -173,7 +172,6 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
this.indexStoreConfig = new IndexStoreConfig(settings);
this.analysisRegistry = analysisRegistry;
this.indicesQueriesRegistry = indicesQueriesRegistry;
- this.clusterService = clusterService;
this.indexNameExpressionResolver = indexNameExpressionResolver;
this.indicesRequestCache = new IndicesRequestCache(settings);
this.indicesQueryCache = new IndicesQueryCache(settings);
@@ -379,6 +377,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
*/
private synchronized IndexService createIndexService(final String reason, final NodeServicesProvider nodeServicesProvider, IndexMetaData indexMetaData, IndicesQueryCache indicesQueryCache, IndicesFieldDataCache indicesFieldDataCache, List<IndexEventListener> builtInListeners, IndexingOperationListener... indexingOperationListeners) throws IOException {
final Index index = indexMetaData.getIndex();
+ final ClusterService clusterService = nodeServicesProvider.getClusterService();
final Predicate<String> indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state());
final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting);
logger.debug("creating Index [{}], shards [{}]/[{}{}] - reason [{}]",
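
Design note: rather than keeping a ClusterService field for a single call site, createIndexService() now resolves it from the NodeServicesProvider it already receives (getClusterService() is the accessor used in the hunk above). A sketch of the resulting pattern; buildIndexNameMatcher is a hypothetical helper name, not part of the patch:

    // no ClusterService field needed; resolve it where it is used
    private Predicate<String> buildIndexNameMatcher(Index index, NodeServicesProvider provider) {
        final ClusterService clusterService = provider.getClusterService();
        return expression ->
                indexNameExpressionResolver.matchesIndex(index.getName(), expression, clusterService.state());
    }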
diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
index 5ee2232068f..c6b4f080b91 100644
--- a/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
+++ b/core/src/main/java/org/elasticsearch/monitor/os/OsProbe.java
@@ -20,7 +20,7 @@
package org.elasticsearch.monitor.os;
import org.apache.lucene.util.Constants;
-import org.apache.lucene.util.SuppressForbidden;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.monitor.Probes;
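
BootstrapCheck, Loggers, and OsProbe all make the same one-line swap: suppressions must use Elasticsearch's own org.elasticsearch.common.SuppressForbidden, the annotation the project's forbidden-apis configuration recognizes, instead of Lucene's identically named one. Usage is unchanged; a hedged sketch (class, method, and reason text are hypothetical):

    import org.elasticsearch.common.SuppressForbidden;

    class ProcProbe {
        @SuppressForbidden(reason = "access /proc for OS metrics")
        static java.nio.file.Path procPath(String first, String... more) {
            // java.nio.file.Paths.get is a forbidden API in this codebase;
            // the recognized annotation exempts this method from the check
            return java.nio.file.Paths.get(first, more);
        }
    }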
diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
index a7289124704..c53adb08ce2 100644
--- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
@@ -66,9 +66,10 @@ public class TransportNodesActionTests extends ESTestCase {
private ClusterService clusterService;
private CapturingTransport transport;
- private TestTransportNodesAction action;
+ private TransportService transportService;
public void testRequestIsSentToEachNode() throws Exception {
+ TransportNodesAction action = getTestTransportNodesAction();
TestNodesRequest request = new TestNodesRequest();
PlainActionFuture<TestNodesResponse> listener = new PlainActionFuture<>();
action.new AsyncAction(null, request, listener).start();
@@ -79,6 +80,7 @@ public class TransportNodesActionTests extends ESTestCase {
}
public void testNodesSelectors() {
+ TransportNodesAction action = getTestTransportNodesAction();
int numSelectors = randomIntBetween(1, 5);
Set<String> nodeSelectors = new HashSet<>();
for (int i = 0; i < numSelectors; i++) {
@@ -98,10 +100,12 @@ public class TransportNodesActionTests extends ESTestCase {
}
public void testNewResponseNullArray() {
+ TransportNodesAction action = getTestTransportNodesAction();
expectThrows(NullPointerException.class, () -> action.newResponse(new TestNodesRequest(), null));
}
public void testNewResponse() {
+ TestTransportNodesAction action = getTestTransportNodesAction();
TestNodesRequest request = new TestNodesRequest();
List<TestNodeResponse> expectedNodeResponses = mockList(TestNodeResponse.class, randomIntBetween(0, 2));
expectedNodeResponses.add(new TestNodeResponse());
@@ -125,6 +129,19 @@ public class TransportNodesActionTests extends ESTestCase {
assertTrue(failures.containsAll(response.failures()));
}
+ public void testFiltering() throws Exception {
+ TransportNodesAction action = getFilteringTestTransportNodesAction(transportService);
+ TestNodesRequest request = new TestNodesRequest();
+ PlainActionFuture<TestNodesResponse> listener = new PlainActionFuture<>();
+ action.new AsyncAction(null, request, listener).start();
+ Map<String, List<CapturingTransport.CapturedRequest>> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear();
+ // check requests were only sent to data nodes
+ for (String nodeTarget : capturedRequests.keySet()) {
+ assertTrue(clusterService.state().nodes().get(nodeTarget).isDataNode());
+ }
+ assertEquals(clusterService.state().nodes().getDataNodes().size(), capturedRequests.size());
+ }
+
private <T> List<T> mockList(Class<T> clazz, int size) {
List<T> failures = new ArrayList<>(size);
for (int i = 0; i < size; ++i) {
@@ -160,7 +177,7 @@ public class TransportNodesActionTests extends ESTestCase {
super.setUp();
transport = new CapturingTransport();
clusterService = createClusterService(THREAD_POOL);
- final TransportService transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName());
+ transportService = new TransportService(transport, THREAD_POOL, clusterService.state().getClusterName());
transportService.start();
transportService.acceptIncomingRequests();
int numNodes = randomIntBetween(3, 10);
@@ -182,7 +199,17 @@ public class TransportNodesActionTests extends ESTestCase {
stateBuilder.nodes(discoBuilder);
ClusterState clusterState = stateBuilder.build();
setState(clusterService, clusterState);
- action = new TestTransportNodesAction(
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ super.tearDown();
+ clusterService.close();
+ transport.close();
+ }
+
+ public TestTransportNodesAction getTestTransportNodesAction() {
+ return new TestTransportNodesAction(
Settings.EMPTY,
THREAD_POOL,
clusterService,
@@ -194,11 +221,17 @@ public class TransportNodesActionTests extends ESTestCase {
);
}
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- clusterService.close();
- transport.close();
+ public FilteringTestTransportNodesAction getFilteringTestTransportNodesAction(TransportService transportService) {
+ return new FilteringTestTransportNodesAction(
+ Settings.EMPTY,
+ THREAD_POOL,
+ clusterService,
+ transportService,
+ new ActionFilters(Collections.emptySet()),
+ TestNodesRequest::new,
+ TestNodeRequest::new,
+ ThreadPool.Names.SAME
+ );
}
private static DiscoveryNode newNode(int nodeId, Map<String, String> attributes, Set<DiscoveryNode.Role> roles) {
@@ -243,6 +276,21 @@ public class TransportNodesActionTests extends ESTestCase {
}
}
+ private static class FilteringTestTransportNodesAction
+ extends TestTransportNodesAction {
+
+ FilteringTestTransportNodesAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService
+ transportService, ActionFilters actionFilters, Supplier<TestNodesRequest> request,
+ Supplier<TestNodeRequest> nodeRequest, String nodeExecutor) {
+ super(settings, threadPool, clusterService, transportService, actionFilters, request, nodeRequest, nodeExecutor);
+ }
+
+ @Override
+ protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
+ return nodes.getDataNodes().keys().toArray(String.class);
+ }
+ }
+
private static class TestNodesRequest extends BaseNodesRequest<TestNodesRequest> {
TestNodesRequest(String... nodesIds) {
super(nodesIds);
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
index 4246e075b4c..51df3ee0386 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java
@@ -2184,7 +2184,7 @@ public class InternalEngineTests extends ESTestCase {
}
public void testDocStats() throws IOException {
- final int numDocs = randomIntBetween(1, 10);
+ final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime());
diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
index 8595f235c0e..f71f2d72019 100644
--- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
+++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java
@@ -986,7 +986,7 @@ public class ShadowEngineTests extends ESTestCase {
}
public void testDocStats() throws IOException {
- final int numDocs = randomIntBetween(1, 10);
+ final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime());
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
index c3323919c82..0000a6d65dd 100644
--- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
+++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java
@@ -332,6 +332,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put("index.routing.allocation.include._name", primariesNode.getNode().getName())
.put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)
+ .put("index.allocation.max_retries", Integer.MAX_VALUE) // keep on retrying
));
ensureGreen(); // allocated with empty commit
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
index 76f7a30e078..2abf07164bf 100644
--- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java
@@ -105,6 +105,7 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings()));
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18558")
public void testDeleteIndexStore() throws Exception {
IndicesService indicesService = getIndicesService();
IndexService test = createIndex("test");
diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java
index a7648e18cd6..98ae3241dbb 100644
--- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java
+++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java
@@ -43,15 +43,6 @@ import static org.hamcrest.Matchers.is;
@ESIntegTestCase.ClusterScope(minNumDataNodes = 2)
public class SearchWhileRelocatingIT extends ESIntegTestCase {
- @Nightly
- public void testSearchAndRelocateConcurrently0Replicas() throws Exception {
- testSearchAndRelocateConcurrently(0);
- }
-
- @Nightly
- public void testSearchAndRelocateConcurrently1Replicas() throws Exception {
- testSearchAndRelocateConcurrently(1);
- }
public void testSearchAndRelocateConcurrentlyRandomReplicas() throws Exception {
testSearchAndRelocateConcurrently(randomIntBetween(0, 1));
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
new file mode 100644
index 00000000000..09129072177
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
@@ -0,0 +1,1135 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.suggest;
+
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.ReduceSearchPhaseException;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder;
+import org.elasticsearch.search.suggest.phrase.Laplace;
+import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
+import org.elasticsearch.search.suggest.phrase.StupidBackoff;
+import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
+import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
+import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ExecutionException;
+
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
+import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
+import static org.hamcrest.Matchers.anyOf;
+import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.nullValue;
+
+/**
+ * Integration tests for term and phrase suggestions. Many of these tests make many requests that vary only slightly from one another. Where
+ * possible these tests should build the configuration for the first request, make the request, modify the configuration for the next request,
+ * make that request, modify again, request again, etc. This makes it very obvious what changes between requests.
+ */
+public class SuggestSearchIT extends ESIntegTestCase {
+
+ // see #3196
+ public void testSuggestAcrossMultipleIndices() throws IOException {
+ createIndex("test");
+ ensureGreen();
+
+ index("test", "type1", "1", "text", "abcd");
+ index("test", "type1", "2", "text", "aacd");
+ index("test", "type1", "3", "text", "abbd");
+ index("test", "type1", "4", "text", "abcc");
+ refresh();
+
+ TermSuggestionBuilder termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("abcd");
+ logger.info("--> run suggestions with one index");
+ searchSuggest("test", termSuggest);
+ createIndex("test_1");
+ ensureGreen();
+
+ index("test_1", "type1", "1", "text", "ab cd");
+ index("test_1", "type1", "2", "text", "aa cd");
+ index("test_1", "type1", "3", "text", "ab bd");
+ index("test_1", "type1", "4", "text", "ab cc");
+ refresh();
+ termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("ab cd")
+ .minWordLength(1);
+ logger.info("--> run suggestions with two indices");
+ searchSuggest("test", termSuggest);
+
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("properties")
+ .startObject("text").field("type", "text").field("analyzer", "keyword").endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(prepareCreate("test_2").addMapping("type1", mapping));
+ ensureGreen();
+
+ index("test_2", "type1", "1", "text", "ab cd");
+ index("test_2", "type1", "2", "text", "aa cd");
+ index("test_2", "type1", "3", "text", "ab bd");
+ index("test_2", "type1", "4", "text", "ab cc");
+ index("test_2", "type1", "1", "text", "abcd");
+ index("test_2", "type1", "2", "text", "aacd");
+ index("test_2", "type1", "3", "text", "abbd");
+ index("test_2", "type1", "4", "text", "abcc");
+ refresh();
+
+ termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("ab cd")
+ .minWordLength(1);
+ logger.info("--> run suggestions with three indices");
+ try {
+ searchSuggest("test", termSuggest);
+ fail(" can not suggest across multiple indices with different analysis chains");
+ } catch (ReduceSearchPhaseException ex) {
+ assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
+ assertThat(ex.getCause().getMessage(),
+ anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
+ endsWith("Suggest entries have different sizes actual [2] expected [1]")));
+ } catch (IllegalStateException ex) {
+ assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
+ endsWith("Suggest entries have different sizes actual [2] expected [1]")));
+ }
+
+
+ termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("ABCD")
+ .minWordLength(1);
+ logger.info("--> run suggestions with four indices");
+ try {
+ searchSuggest("test", termSuggest);
+ fail(" can not suggest across multiple indices with different analysis chains");
+ } catch (ReduceSearchPhaseException ex) {
+ assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
+ assertThat(ex.getCause().getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
+ endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
+ } catch (IllegalStateException ex) {
+ assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
+ endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
+ }
+ }
+
+ // see #3037
+ public void testSuggestModes() throws IOException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, 1)
+ .put(SETTING_NUMBER_OF_REPLICAS, 0)
+ .put("index.analysis.analyzer.biword.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .put("index.analysis.filter.shingler.type", "shingle")
+ .put("index.analysis.filter.shingler.min_shingle_size", 2)
+ .put("index.analysis.filter.shingler.max_shingle_size", 3));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("properties")
+ .startObject("name")
+ .field("type", "text")
+ .startObject("fields")
+ .startObject("shingled")
+ .field("type", "text")
+ .field("analyzer", "biword")
+ .field("search_analyzer", "standard")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+
+ index("test", "type1", "1", "name", "I like iced tea");
+ index("test", "type1", "2", "name", "I like tea.");
+ index("test", "type1", "3", "name", "I like ice cream.");
+ refresh();
+
+ DirectCandidateGeneratorBuilder generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always")
+ .maxEdits(2);
+ PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
+ .addCandidateGenerator(generator)
+ .gramSize(3);
+ Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
+ assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea");
+
+ generator.suggestMode(null);
+ searchSuggest = searchSuggest( "ice tea", "did_you_mean", phraseSuggestion);
+ assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean");
+ }
+
+ /**
+ * Creates a new {@link DirectCandidateGeneratorBuilder}
+ *
+ * @param field
+ * the field this candidate generator operates on.
+ */
+ private DirectCandidateGeneratorBuilder candidateGenerator(String field) {
+ return new DirectCandidateGeneratorBuilder(field);
+ }
+
+ // see #2729
+ public void testSizeOneShard() throws Exception {
+ prepareCreate("test").setSettings(
+ SETTING_NUMBER_OF_SHARDS, 1,
+ SETTING_NUMBER_OF_REPLICAS, 0).get();
+ ensureGreen();
+
+ for (int i = 0; i < 15; i++) {
+ index("test", "type1", Integer.toString(i), "text", "abc" + i);
+ }
+ refresh();
+
+ SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get();
+ assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
+
+ TermSuggestionBuilder termSuggestion = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("abcd")
+ .size(10);
+ Suggest suggest = searchSuggest("test", termSuggestion);
+ assertSuggestion(suggest, 0, "test", 10, "abc0");
+
+ termSuggestion.text("abcd").shardSize(5);
+ suggest = searchSuggest("test", termSuggestion);
+ assertSuggestion(suggest, 0, "test", 5, "abc0");
+ }
+
+ public void testUnmappedField() throws IOException, InterruptedException, ExecutionException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put("index.analysis.analyzer.biword.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .put("index.analysis.filter.shingler.type", "shingle")
+ .put("index.analysis.filter.shingler.min_shingle_size", 2)
+ .put("index.analysis.filter.shingler.max_shingle_size", 3));
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("properties")
+ .startObject("name")
+ .field("type", "text")
+ .startObject("fields")
+ .startObject("shingled")
+ .field("type", "text")
+ .field("analyzer", "biword")
+ .field("search_analyzer", "standard")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ indexRandom(true, client().prepareIndex("test", "type1").setSource("name", "I like iced tea"),
+ client().prepareIndex("test", "type1").setSource("name", "I like tea."),
+ client().prepareIndex("test", "type1").setSource("name", "I like ice cream."));
+ refresh();
+
+ PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
+ .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
+ .gramSize(3);
+ Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
+ assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea");
+
+ phraseSuggestion = phraseSuggestion("nosuchField")
+ .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
+ .gramSize(3);
+ {
+ SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
+ searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
+ assertThrows(searchBuilder, SearchPhaseExecutionException.class);
+ }
+ {
+ SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
+ searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
+ assertThrows(searchBuilder, SearchPhaseExecutionException.class);
+ }
+ }
+
+ public void testSimple() throws Exception {
+ createIndex("test");
+ ensureGreen();
+
+ index("test", "type1", "1", "text", "abcd");
+ index("test", "type1", "2", "text", "aacd");
+ index("test", "type1", "3", "text", "abbd");
+ index("test", "type1", "4", "text", "abcc");
+ refresh();
+
+ SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get();
+ assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
+
+ TermSuggestionBuilder termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("abcd");
+ Suggest suggest = searchSuggest("test", termSuggest);
+ assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc");
+ assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
+
+ suggest = searchSuggest("test", termSuggest);
+ assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc");
+ assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
+ }
+
+ public void testEmpty() throws Exception {
+ createIndex("test");
+ ensureGreen();
+
+ index("test", "type1", "1", "text", "bar");
+ refresh();
+
+ TermSuggestionBuilder termSuggest = termSuggestion("text")
+ .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
+ .text("abcd");
+ Suggest suggest = searchSuggest("test", termSuggest);
+ assertSuggestionSize(suggest, 0, 0, "test");
+ assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
+
+ suggest = searchSuggest("test", termSuggest);
+ assertSuggestionSize(suggest, 0, 0, "test");
+ assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
+ }
+
+ public void testWithMultipleCommands() throws Exception {
+ createIndex("test");
+ ensureGreen();
+
+ index("test", "typ1", "1", "field1", "prefix_abcd", "field2", "prefix_efgh");
+ index("test", "typ1", "2", "field1", "prefix_aacd", "field2", "prefix_eeeh");
+ index("test", "typ1", "3", "field1", "prefix_abbd", "field2", "prefix_efff");
+ index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg");
+ refresh();
+
+ Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
+ suggestions.put("size1", termSuggestion("field1")
+ .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0)
+ .suggestMode(SuggestMode.ALWAYS));
+ suggestions.put("field2", termSuggestion("field2")
+ .text("prefix_eeeh prefix_efgh")
+ .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ suggestions.put("accuracy", termSuggestion("field2")
+ .text("prefix_efgh").accuracy(1f)
+ .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ Suggest suggest = searchSuggest(null, 0, suggestions);
+ assertSuggestion(suggest, 0, "size1", "prefix_aacd");
+ assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh"));
+ assertSuggestion(suggest, 0, "field2", "prefix_efgh");
+ assertThat(suggest.getSuggestion("field2").getEntries().get(1).getText().string(), equalTo("prefix_efgh"));
+ assertSuggestion(suggest, 1, "field2", "prefix_eeeh", "prefix_efff", "prefix_eggg");
+ assertSuggestionSize(suggest, 0, 0, "accuracy");
+ }
+
+ public void testSizeAndSort() throws Exception {
+ createIndex("test");
+ ensureGreen();
+
+ Map<String, Integer> termsAndDocCount = new HashMap<>();
+ termsAndDocCount.put("prefix_aaad", 20);
+ termsAndDocCount.put("prefix_abbb", 18);
+ termsAndDocCount.put("prefix_aaca", 16);
+ termsAndDocCount.put("prefix_abba", 14);
+ termsAndDocCount.put("prefix_accc", 12);
+ termsAndDocCount.put("prefix_addd", 10);
+ termsAndDocCount.put("prefix_abaa", 8);
+ termsAndDocCount.put("prefix_dbca", 6);
+ termsAndDocCount.put("prefix_cbad", 4);
+ termsAndDocCount.put("prefix_aacd", 1);
+ termsAndDocCount.put("prefix_abcc", 1);
+ termsAndDocCount.put("prefix_accd", 1);
+
+ for (Entry<String, Integer> entry : termsAndDocCount.entrySet()) {
+ for (int i = 0; i < entry.getValue(); i++) {
+ index("test", "type1", entry.getKey() + i, "field1", entry.getKey());
+ }
+ }
+ refresh();
+
+ Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
+ suggestions.put("size3SortScoreFirst", termSuggestion("field1")
+ .size(3).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ suggestions.put("size10SortScoreFirst", termSuggestion("field1")
+ .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS).shardSize(50));
+ suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion("field1")
+ .maxEdits(1)
+ .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ suggestions.put("size10SortFrequencyFirst", termSuggestion("field1")
+ .size(10).sort(SortBy.FREQUENCY).shardSize(1000)
+ .minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ Suggest suggest = searchSuggest("prefix_abcd", 0, suggestions);
+
+ // The commented out assertions fail sometimes because suggestions are based on shard frequencies instead of index frequencies.
+ assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd");
+ assertSuggestion(suggest, 0, "size10SortScoreFirst", 10, "prefix_aacd", "prefix_abcc", "prefix_accd" /*, "prefix_aaad" */);
+ assertSuggestion(suggest, 0, "size3SortScoreFirstMaxEdits1", "prefix_aacd", "prefix_abcc", "prefix_accd");
+ assertSuggestion(suggest, 0, "size10SortFrequencyFirst", "prefix_aaad", "prefix_abbb", "prefix_aaca", "prefix_abba",
+ "prefix_accc", "prefix_addd", "prefix_abaa", "prefix_dbca", "prefix_cbad", "prefix_aacd");
+
+ // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_abcc"));
+ // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_accd"));
+ }
+
+ // see #2817
+ public void testStopwordsOnlyPhraseSuggest() throws IOException {
+ assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings(
+ Settings.builder()
+ .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
+ .putArray("index.analysis.analyzer.stopwd.filter", "stop")
+ ));
+ ensureGreen();
+ index("test", "typ1", "1", "body", "this is a test");
+ refresh();
+
+ Suggest searchSuggest = searchSuggest( "a an the", "simple_phrase",
+ phraseSuggestion("body").gramSize(1)
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
+ .size(1));
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+ }
+
+ public void testPrefixLength() throws IOException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, 1)
+ .put("index.analysis.analyzer.reverse.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
+ .put("index.analysis.analyzer.body.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .put("index.analysis.analyzer.bigram.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", false)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
+ .startObject("properties")
+ .startObject("body").field("type", "text").field("analyzer", "body").endObject()
+ .startObject("body_reverse").field("type", "text").field("analyzer", "reverse").endObject()
+ .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ index("test", "type1", "1", "body", "hello world");
+ index("test", "type1", "2", "body", "hello world");
+ index("test", "type1", "3", "body", "hello words");
+ refresh();
+
+ Suggest searchSuggest = searchSuggest( "hello word", "simple_phrase",
+ phraseSuggestion("body")
+ .addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always"))
+ .size(1).confidence(1.0f));
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words");
+
+ searchSuggest = searchSuggest( "hello word", "simple_phrase",
+ phraseSuggestion("body")
+ .addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always"))
+ .size(1).confidence(1.0f));
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world");
+ }
+
+ public void testBasicPhraseSuggest() throws IOException, URISyntaxException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put("index.analysis.analyzer.reverse.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
+ .put("index.analysis.analyzer.body.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .put("index.analysis.analyzer.bigram.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", false)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
+ .put("index.number_of_shards", 1));
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
+ .startObject("_all")
+ .field("store", true)
+ .field("term_vector", "with_positions_offsets")
+ .endObject()
+ .startObject("properties")
+ .startObject("body").
+ field("type", "text").
+ field("analyzer", "body")
+ .endObject()
+ .startObject("body_reverse").
+ field("type", "text").
+ field("analyzer", "reverse")
+ .endObject()
+ .startObject("bigram").
+ field("type", "text").
+ field("analyzer", "bigram")
+ .endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ String[] strings = new String[]{
+ "Arthur, King of the Britons",
+ "Sir Lancelot the Brave",
+ "Patsy, Arthur's Servant",
+ "Sir Robin the Not-Quite-So-Brave-as-Sir-Lancelot",
+ "Sir Bedevere the Wise",
+ "Sir Galahad the Pure",
+ "Miss Islington, the Witch",
+ "Zoot",
+ "Leader of Robin's Minstrels",
+ "Old Crone",
+ "Frank, the Historian",
+ "Frank's Wife",
+ "Dr. Piglet",
+ "Dr. Winston",
+ "Sir Robin (Stand-in)",
+ "Knight Who Says Ni",
+ "Police sergeant who stops the film",
+ };
+ for (String line : strings) {
+ index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
+ }
+ refresh();
+
+ PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("bigram").gramSize(2).analyzer("body")
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
+ .size(1);
+ Suggest searchSuggest = searchSuggest( "Frank's Wise", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "frank's wife");
+ assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Frank's Wise"));
+
+ phraseSuggest.realWordErrorLikelihood(0.95f);
+ searchSuggest = searchSuggest( "Artur, Kinh of the Britons", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+ // Check the "text" field this one time.
+ assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(),
+ equalTo("Artur, Kinh of the Britons"));
+
+ // Ask for highlighting
+ phraseSuggest.highlight("", "");
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+ assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(),
+ equalTo("arthur king of the britons"));
+
+ // pass in a correct phrase
+ phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f);
+ searchSuggest = searchSuggest( "Arthur, King of the Britons", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ // pass in a correct phrase - set confidence to 2
+ phraseSuggest.confidence(2f);
+ searchSuggest = searchSuggest( "Arthur, King of the Britons", "simple_phrase", phraseSuggest);
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+
+ // pass in a correct phrase - set confidence to 0.99
+ phraseSuggest.confidence(0.99f);
+ searchSuggest = searchSuggest( "Arthur, King of the Britons", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ //test reverse suggestions with pre & post filter
+ phraseSuggest
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
+ .addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse")
+ .postFilter("reverse"));
+ searchSuggest = searchSuggest( "Artur, Ging of the Britons", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ // set all mass to trigrams (not indexed)
+ phraseSuggest.clearCandidateGenerators()
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
+ .smoothingModel(new LinearInterpolation(1,0,0));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+
+ // set all mass to bigrams
+ phraseSuggest.smoothingModel(new LinearInterpolation(0,1,0));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ // distribute mass
+ phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ searchSuggest = searchSuggest( "Frank's Wise", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "frank's wife");
+
+ // try all smoothing methods
+ phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ phraseSuggest.smoothingModel(new Laplace(0.2));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ phraseSuggest.smoothingModel(new StupidBackoff(0.1));
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase",phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "arthur king of the britons");
+
+ // check tokenLimit
+ phraseSuggest.smoothingModel(null).tokenLimit(4);
+ searchSuggest = searchSuggest( "Artur, King of the Britns", "simple_phrase", phraseSuggest);
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+
+ phraseSuggest.tokenLimit(15).smoothingModel(new StupidBackoff(0.1));
+ searchSuggest = searchSuggest( "Sir Bedever the Wife Sir Bedever the Wife Sir Bedever the Wife", "simple_phrase", phraseSuggest);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "sir bedevere the wise sir bedevere the wise sir bedevere the wise");
+ // Check the name this time because we're repeating it which is funky
+ assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(),
+ equalTo("Sir Bedever the Wife Sir Bedever the Wife Sir Bedever the Wife"));
+ }
+
+ public void testSizeParam() throws IOException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(SETTING_NUMBER_OF_SHARDS, 1)
+ .put("index.analysis.analyzer.reverse.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
+ .put("index.analysis.analyzer.body.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .put("index.analysis.analyzer.bigram.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", false)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type1")
+ .startObject("_all")
+ .field("store", true)
+ .field("term_vector", "with_positions_offsets")
+ .endObject()
+ .startObject("properties")
+ .startObject("body")
+ .field("type", "text")
+ .field("analyzer", "body")
+ .endObject()
+ .startObject("body_reverse")
+ .field("type", "text")
+ .field("analyzer", "reverse")
+ .endObject()
+ .startObject("bigram")
+ .field("type", "text")
+ .field("analyzer", "bigram")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ String line = "xorr the god jewel";
+ index("test", "type1", "1", "body", line, "body_reverse", line, "bigram", line);
+ line = "I got it this time";
+ index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line);
+ refresh();
+
+ PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
+ .realWordErrorLikelihood(0.95f)
+ .gramSize(2)
+ .analyzer("body")
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1)
+ .accuracy(0.1f))
+ .smoothingModel(new StupidBackoff(0.1))
+ .maxErrors(1.0f)
+ .size(5);
+ Suggest searchSuggest = searchSuggest("Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+
+ // we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2
+ phraseSuggestion.clearCandidateGenerators()
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2)
+ .accuracy(0.1f));
+ searchSuggest = searchSuggest( "Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
+ assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
+ }
+
+ public void testPhraseBoundaryCases() throws IOException, URISyntaxException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard
+ .put("index.analysis.analyzer.body.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .put("index.analysis.analyzer.bigram.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .put("index.analysis.analyzer.ngram.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.ngram.filter", "my_shingle2", "lowercase")
+ .put("index.analysis.analyzer.myDefAnalyzer.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.myDefAnalyzer.filter", "shingle", "lowercase")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", false)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle2.type", "shingle")
+ .put("index.analysis.filter.my_shingle2.output_unigrams", true)
+ .put("index.analysis.filter.my_shingle2.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle2.max_shingle_size", 2));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder()
+ .startObject().startObject("type1")
+ .startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
+ .startObject("properties")
+ .startObject("body").field("type", "text").field("analyzer", "body").endObject()
+ .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
+ .startObject("ngram").field("type", "text").field("analyzer", "ngram").endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ String[] strings = new String[]{
+ "Xorr the God-Jewel",
+ "Grog the God-Crusher",
+ "Xorn",
+ "Walter Newell",
+ "Wanda Maximoff",
+ "Captain America",
+ "American Ace",
+ "Wundarr the Aquarian",
+ "Will o' the Wisp",
+ "Xemnu the Titan"
+ };
+ for (String line : strings) {
+ index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
+ }
+ refresh();
+
+ NumShards numShards = getNumShards("test");
+
+ // Let's make sure some things throw exceptions
+ PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
+ .analyzer("body")
+ .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
+ .realWordErrorLikelihood(0.95f)
+ .maxErrors(0.5f)
+ .size(1);
+ phraseSuggestion.clearCandidateGenerators().analyzer(null);
+ try {
+ searchSuggest("xor the got-jewel", numShards.numPrimaries, Collections.singletonMap("simple_phrase", phraseSuggestion));
+ fail("analyzer does only produce ngrams");
+ } catch (SearchPhaseExecutionException e) {
+ }
+
+ phraseSuggestion.analyzer("bigram");
+ try {
+ searchSuggest("xor the got-jewel", numShards.numPrimaries, Collections.singletonMap("simple_phrase", phraseSuggestion));
+ fail("analyzer does only produce ngrams");
+ } catch (SearchPhaseExecutionException e) {
+ }
+
+ // Now we'll make sure some things don't
+ phraseSuggestion.forceUnigrams(false);
+ searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
+
+ // Field doesn't produce unigrams but the analyzer does
+ phraseSuggestion.forceUnigrams(true).analyzer("ngram");
+ searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
+
+ phraseSuggestion = phraseSuggestion("ngram")
+ .analyzer("myDefAnalyzer")
+ .forceUnigrams(true)
+ .realWordErrorLikelihood(0.95f)
+ .maxErrors(0.5f)
+ .size(1)
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"));
+ Suggest suggest = searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
+
+ // "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by
+ // earlier term (xorn):
+ assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel");
+
+ phraseSuggestion.analyzer(null);
+ suggest = searchSuggest( "xor the got-jewel", 0, Collections.singletonMap("simple_phrase", phraseSuggestion));
+
+ // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the
+ // probability that the term is not in the dictionary but is NOT a misspelling is relatively high compared to the others that
+ // have no n-gram with the other terms in the phrase :) you can tune this via realWordErrorLikelihood
+ assertSuggestion(suggest, 0, "simple_phrase", "xorr the god jewel");
+ }
+
+ public void testDifferentShardSize() throws Exception {
+ createIndex("test");
+ ensureGreen();
+ indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "foobar1").setRouting("1"),
+ client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"),
+ client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3"));
+
+ Suggest suggest = searchSuggest( "foobar", "simple",
+ termSuggestion("field1")
+ .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
+ ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple");
+ }
+
+ // see #3469
+ public void testShardFailures() throws IOException, InterruptedException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put("index.analysis.analyzer.suggest.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .put("index.analysis.filter.shingler.type", "shingle")
+ .put("index.analysis.filter.shingler.min_shingle_size", 2)
+ .put("index.analysis.filter.shingler.max_shingle_size", 5)
+ .put("index.analysis.filter.shingler.output_unigrams", true));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2")
+ .startObject("properties")
+ .startObject("name")
+ .field("type", "text")
+ .field("analyzer", "suggest")
+ .endObject()
+ .endObject()
+ .endObject().endObject();
+ assertAcked(builder.addMapping("type2", mapping));
+ ensureGreen();
+
+ index("test", "type2", "1", "foo", "bar");
+ index("test", "type2", "2", "foo", "bar");
+ index("test", "type2", "3", "foo", "bar");
+ index("test", "type2", "4", "foo", "bar");
+ index("test", "type2", "5", "foo", "bar");
+ index("test", "type2", "1", "name", "Just testing the suggestions api");
+ index("test", "type2", "2", "name", "An other title about equal length");
+ // Note that the last document has to have about the same length as the others or cutoff rechecking will remove the useful suggestion
+ refresh();
+
+ // When searching on a shard with a non-existing mapping, we should fail
+ SearchRequestBuilder request = client().prepareSearch().setSize(0)
+ .suggest(
+ new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
+ phraseSuggestion("fielddoesnotexist").maxErrors(5.0f)));
+ assertThrows(request, SearchPhaseExecutionException.class);
+
+ // When searching on a shard which does not yet hold any document of an existing type, we should not fail
+ SearchResponse searchResponse = client().prepareSearch().setSize(0)
+ .suggest(
+ new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
+ phraseSuggestion("name").maxErrors(5.0f)))
+ .get();
+ ElasticsearchAssertions.assertNoFailures(searchResponse);
+ ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
+ }
+
+ // see #3469
+ public void testEmptyShards() throws IOException, InterruptedException {
+ XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().
+ startObject().
+ startObject("type1").
+ startObject("properties").
+ startObject("name").
+ field("type", "text").
+ field("analyzer", "suggest").
+ endObject().
+ endObject().
+ endObject().
+ endObject();
+ assertAcked(prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put("index.analysis.analyzer.suggest.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .put("index.analysis.filter.shingler.type", "shingle")
+ .put("index.analysis.filter.shingler.min_shingle_size", 2)
+ .put("index.analysis.filter.shingler.max_shingle_size", 5)
+ .put("index.analysis.filter.shingler.output_unigrams", true)).addMapping("type1", mappingBuilder));
+ ensureGreen();
+
+ index("test", "type2", "1", "foo", "bar");
+ index("test", "type2", "2", "foo", "bar");
+ index("test", "type1", "1", "name", "Just testing the suggestions api");
+ index("test", "type1", "2", "name", "An other title about equal length");
+ refresh();
+
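+ // Only a few shards receive documents at all, and some of those hold only type2 docs without the "name" field; the
+ // phrase suggestion on "name" should still succeed on such empty shards rather than fail (see #3469).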
+ SearchResponse searchResponse = client().prepareSearch()
+ .setSize(0)
+ .suggest(
+ new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
+ phraseSuggestion("name").maxErrors(5.0f)))
+ .get();
+
+ assertNoFailures(searchResponse);
+ assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
+ }
+
+ /**
+ * Searching for a rare phrase shouldn't provide any suggestions if confidence > 1. This was possible before we rechecked the cutoff
+ * score during the reduce phase. Failures didn't occur every time, maybe two out of five tries, but we don't repeat the test to save
+ * time.
+ */
+ public void testSearchForRarePhrase() throws IOException {
+ // If there isn't enough chaff per shard then shards can become unbalanced, making the cutoff recheck that this test
+ // exercises do more harm than good.
+ int chaffPerShard = 100;
+
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put("index.analysis.analyzer.body.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", true)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type1")
+ .startObject("_all")
+ .field("store", true)
+ .field("term_vector", "with_positions_offsets")
+ .endObject()
+ .startObject("properties")
+ .startObject("body")
+ .field("type", "text")
+ .field("analyzer", "body")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ NumShards test = getNumShards("test");
+
+ List<String> phrases = new ArrayList<>();
+ Collections.addAll(phrases, "nobel prize", "noble gases", "somethingelse prize", "pride and joy", "notes are fun");
+ for (int i = 0; i < 8; i++) {
+ phrases.add("noble somethingelse" + i);
+ }
+ for (int i = 0; i < test.numPrimaries * chaffPerShard; i++) {
+ phrases.add("chaff" + i);
+ }
+ for (String phrase: phrases) {
+ index("test", "type1", phrase, "body", phrase);
+ }
+ refresh();
+
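+ // With confidence 2 a correction must score at least twice as well as the input phrase itself, so the correctly
+ // spelled "nobel prize" below yields no suggestion while the misspelled "noble prize" still does.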
+ Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion("body")
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
+ .confidence(2f)
+ .maxErrors(5f)
+ .size(1));
+ assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
+
+ searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion("body")
+ .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
+ .confidence(2f)
+ .maxErrors(5f)
+ .size(1));
+ assertSuggestion(searchSuggest, 0, 0, "simple_phrase", "nobel prize");
+ }
+
+ public void testSuggestWithManyCandidates() throws InterruptedException, ExecutionException, IOException {
+ CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
+ .put(indexSettings())
+ .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
+ .put("index.analysis.analyzer.text.tokenizer", "standard")
+ .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
+ .put("index.analysis.filter.my_shingle.type", "shingle")
+ .put("index.analysis.filter.my_shingle.output_unigrams", true)
+ .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
+ .put("index.analysis.filter.my_shingle.max_shingle_size", 3));
+
+ XContentBuilder mapping = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("type1")
+ .startObject("properties")
+ .startObject("title")
+ .field("type", "text")
+ .field("analyzer", "text")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ assertAcked(builder.addMapping("type1", mapping));
+ ensureGreen();
+
+ List<String> titles = new ArrayList<>();
+
+ // We're going to be searching for:
+ // united states house of representatives elections in washington 2006
+ // But we need to make sure we generate a ton of suggestions so we add a bunch of candidates.
+ // Many of these candidates are drawn from page names on English Wikipedia.
+
+ // Tons of different options very near the exact query term
+ titles.add("United States House of Representatives Elections in Washington 1789");
+ for (int year = 1790; year < 2014; year += 2) {
+ titles.add("United States House of Representatives Elections in Washington " + year);
+ }
+ // Six of these are near enough to be viable suggestions, just not the top one
+
+ // But we can't stop there! Titles that are just a year are pretty common so let's just add one per year
+ // since 0. Why not?
+ for (int year = 0; year < 2015; year++) {
+ titles.add(Integer.toString(year));
+ }
+ // That ought to provide plenty of less-good candidates for the last term
+
+ // Now add or remove a plural on every term we can
+ titles.add("State");
+ titles.add("Houses of Parliament");
+ titles.add("Representative Government");
+ titles.add("Election");
+
+ // Now some possessive
+ titles.add("Washington's Birthday");
+
+ // And some conjugation
+ titles.add("Unified Modeling Language");
+ titles.add("Unite Against Fascism");
+ titles.add("Stated Income Tax");
+ titles.add("Media organizations housed within colleges");
+
+ // And other stuff
+ titles.add("Untied shoelaces");
+ titles.add("Unit circle");
+ titles.add("Untitled");
+ titles.add("Unicef");
+ titles.add("Unrated");
+ titles.add("UniRed");
+ titles.add("Jalan Uniten–Dengkil"); // Highway in Malaysia
+ titles.add("UNITAS");
+ titles.add("UNITER");
+ titles.add("Un-Led-Ed");
+ titles.add("STATS LLC");
+ titles.add("Staples");
+ titles.add("Skates");
+ titles.add("Statues of the Liberators");
+ titles.add("Staten Island");
+ titles.add("Statens Museum for Kunst");
+ titles.add("Hause"); // The last name or the German word, whichever.
+ titles.add("Hose");
+ titles.add("Hoses");
+ titles.add("Howse Peak");
+ titles.add("The Hoose-Gow");
+ titles.add("Hooser");
+ titles.add("Electron");
+ titles.add("Electors");
+ titles.add("Evictions");
+ titles.add("Coronal mass ejection");
+ titles.add("Wasington"); // A film?
+ titles.add("Warrington"); // A town in England
+ titles.add("Waddington"); // Lots of places have this name
+ titles.add("Watlington"); // Ditto
+ titles.add("Waplington"); // Yup, also a town
+ titles.add("Washing of the Spears"); // Book
+
+ for (char c = 'A'; c <= 'Z'; c++) {
+ // Can't forget lists, glorious lists!
+ titles.add("List of former members of the United States House of Representatives (" + c + ")");
+
+ // Lots of people are named "Washington <letter>. Lastname"
+ titles.add("Washington " + c + ". Lastname");
+
+ // Let's just add some more to be evil
+ titles.add("United " + c);
+ titles.add("States " + c);
+ titles.add("House " + c);
+ titles.add("Elections " + c);
+ titles.add("2006 " + c);
+ titles.add(c + " United");
+ titles.add(c + " States");
+ titles.add(c + " House");
+ titles.add(c + " Elections");
+ titles.add(c + " 2006");
+ }
+
+ List<IndexRequestBuilder> builders = new ArrayList<>();
+ for (String title: titles) {
+ builders.add(client().prepareIndex("test", "type1").setSource("title", title));
+ }
+
+ indexRandom(true, builders);
+
+ PhraseSuggestionBuilder suggest = phraseSuggestion("title")
+ .addCandidateGenerator(candidateGenerator("title")
+ .suggestMode("always")
+ .maxTermFreq(.99f)
+ .size(1000) // Setting a silly high size helps to generate a larger list of candidates for testing.
+ .maxInspections(1000) // This too
+ )
+ .confidence(0f)
+ .maxErrors(2f)
+ .shardSize(30000)
+ .size(30000);
+ Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
+ assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
+ assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options
+
+ suggest.size(1);
+ long start = System.currentTimeMillis();
+ searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
+ long total = System.currentTimeMillis() - start;
+ assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
+ // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
+ }
+
+ protected Suggest searchSuggest(String name, SuggestionBuilder<?> suggestion) {
+ return searchSuggest(null, name, suggestion);
+ }
+
+ protected Suggest searchSuggest(String suggestText, String name, SuggestionBuilder<?> suggestion) {
+ Map<String, SuggestionBuilder<?>> map = new HashMap<>();
+ map.put(name, suggestion);
+ return searchSuggest(suggestText, 0, map);
+ }
+
+ protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map<String, SuggestionBuilder<?>> suggestions) {
+ SearchRequestBuilder builder = client().prepareSearch().setSize(0);
+ SuggestBuilder suggestBuilder = new SuggestBuilder();
+ if (suggestText != null) {
+ suggestBuilder.setGlobalText(suggestText);
+ }
+ for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
+ suggestBuilder.addSuggestion(suggestion.getKey(), suggestion.getValue());
+ }
+ builder.suggest(suggestBuilder);
+ SearchResponse actionGet = builder.execute().actionGet();
+ assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed));
+ return actionGet.getSuggest();
+ }
+}
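
For orientation, the helpers above boil down to a suggest-only search. A minimal sketch of the request they assemble, assuming a connected test `client()` and the class's static imports (`termSuggestion`, `SuggestMode.ALWAYS`):

```java
// Sketch only: mirrors what searchSuggest(...) sends, it is not new functionality.
SuggestBuilder suggestBuilder = new SuggestBuilder()
        .setGlobalText("foobar") // used by any suggestion that sets no text of its own
        .addSuggestion("simple", termSuggestion("field1").size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
SearchResponse response = client().prepareSearch()
        .setSize(0) // suggest-only request, no search hits needed
        .suggest(suggestBuilder)
        .get();
Suggest suggest = response.getSuggest();
```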
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
index a7d6e8b920a..0640bf0d6f0 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellCheckerTests.java
@@ -59,7 +59,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
private final BytesRef preTag = new BytesRef("<em>");
private final BytesRef postTag = new BytesRef("</em>");
- public void testMarvelHeros() throws IOException {
+ public void testNgram() throws IOException {
RAMDirectory dir = new RAMDirectory();
Map<String, Analyzer> mapping = new HashMap<>();
mapping.put("body_ngram", new Analyzer() {
@@ -87,9 +87,23 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
IndexWriterConfig conf = new IndexWriterConfig(wrapper);
IndexWriter writer = new IndexWriter(dir, conf);
- BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
- String line = null;
- while ((line = reader.readLine()) != null) {
+ String[] strings = new String[]{
+ "Xorr the God-Jewel",
+ "Grog the God-Crusher",
+ "Xorn",
+ "Walter Newell",
+ "Wanda Maximoff",
+ "Captain America",
+ "American Ace",
+ "USA Hero",
+ "Wundarr the Aquarian",
+ "Will o' the Wisp",
+ "Xemnu the Titan",
+ "Fantastic Four",
+ "Quasar",
+ "Quasar II"
+ };
+ for (String line : strings) {
Document doc = new Document();
doc.add(new Field("body", line, TextField.TYPE_NOT_STORED));
doc.add(new Field("body_ngram", line, TextField.TYPE_NOT_STORED));
@@ -161,7 +175,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
TokenFilter filter = new LowerCaseFilter(t);
try {
SolrSynonymParser parser = new SolrSynonymParser(true, false, new WhitespaceAnalyzer());
- parser.parse(new StringReader("usa => usa, america, american\nursa => usa, america, american"));
+ parser.parse(new StringReader("usa => usa, america, american"));
filter = new SynonymFilter(filter, parser.build(), true);
} catch (Exception e) {
throw new RuntimeException(e);
@@ -191,7 +205,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
assertThat(corrections[0].join(space, preTag, postTag).utf8ToString(), equalTo("captain america"));
}
- public void testMarvelHerosMultiGenerator() throws IOException {
+ public void testMultiGenerator() throws IOException {
RAMDirectory dir = new RAMDirectory();
Map<String, Analyzer> mapping = new HashMap<>();
mapping.put("body_ngram", new Analyzer() {
@@ -228,9 +242,22 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
IndexWriterConfig conf = new IndexWriterConfig(wrapper);
IndexWriter writer = new IndexWriter(dir, conf);
- BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
- String line = null;
- while ((line = reader.readLine()) != null) {
+ String[] strings = new String[]{
+ "Xorr the God-Jewel",
+ "Grog the God-Crusher",
+ "Xorn",
+ "Walter Newell",
+ "Wanda Maximoff",
+ "Captain America",
+ "American Ace",
+ "Wundarr the Aquarian",
+ "Will o' the Wisp",
+ "Xemnu the Titan",
+ "Fantastic Four",
+ "Quasar",
+ "Quasar II"
+ };
+ for (String line : strings) {
Document doc = new Document();
doc.add(new Field("body", line, TextField.TYPE_NOT_STORED));
doc.add(new Field("body_reverse", line, TextField.TYPE_NOT_STORED));
@@ -284,7 +311,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
assertThat(corrections[0].join(new BytesRef(" ")).utf8ToString(), equalTo("quasar ii"));
}
- public void testMarvelHerosTrigram() throws IOException {
+ public void testTrigram() throws IOException {
RAMDirectory dir = new RAMDirectory();
Map<String, Analyzer> mapping = new HashMap<>();
mapping.put("body_ngram", new Analyzer() {
@@ -312,9 +339,23 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
IndexWriterConfig conf = new IndexWriterConfig(wrapper);
IndexWriter writer = new IndexWriter(dir, conf);
- BufferedReader reader = new BufferedReader(new InputStreamReader(NoisyChannelSpellCheckerTests.class.getResourceAsStream("/config/names.txt"), StandardCharsets.UTF_8));
- String line = null;
- while ((line = reader.readLine()) != null) {
+ String[] strings = new String[]{
+ "Xorr the God-Jewel",
+ "Grog the God-Crusher",
+ "Xorn",
+ "Walter Newell",
+ "Wanda Maximoff",
+ "Captain America",
+ "American Ace",
+ "USA Hero",
+ "Wundarr the Aquarian",
+ "Will o' the Wisp",
+ "Xemnu the Titan",
+ "Fantastic Four",
+ "Quasar",
+ "Quasar II"
+ };
+ for (String line : strings) {
Document doc = new Document();
doc.add(new Field("body", line, TextField.TYPE_NOT_STORED));
doc.add(new Field("body_ngram", line, TextField.TYPE_NOT_STORED));
@@ -370,7 +411,7 @@ public class NoisyChannelSpellCheckerTests extends ESTestCase {
TokenFilter filter = new LowerCaseFilter(t);
try {
SolrSynonymParser parser = new SolrSynonymParser(true, false, new WhitespaceAnalyzer());
- parser.parse(new StringReader("usa => usa, america, american\nursa => usa, america, american"));
+ parser.parse(new StringReader("usa => usa, america, american"));
filter = new SynonymFilter(filter, parser.build(), true);
} catch (Exception e) {
throw new RuntimeException(e);
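
All three hunks above make the same substitution: instead of reading `/config/names.txt` from the classpath, each test now indexes a small inline corpus, which keeps the fixture deterministic and self-contained. The pattern, sketched minimally (the `analyzer` here is a stand-in for the per-field wrapper the tests build):

```java
// Minimal sketch of the inline-corpus fixture, assuming some Lucene Analyzer 'analyzer'.
RAMDirectory dir = new RAMDirectory(); // in-memory index, nothing touches disk
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
for (String line : new String[] {"Xorr the God-Jewel", "Xorn", "Captain America"}) {
    Document doc = new Document();
    doc.add(new Field("body", line, TextField.TYPE_NOT_STORED)); // analyzed but not stored
    writer.addDocument(doc);
}
writer.close(); // commit so a reader sees the fixed corpus
```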
diff --git a/distribution/build.gradle b/distribution/build.gradle
index b4c183d9bd1..540796fc5a3 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -196,8 +196,8 @@ configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.nam
into('bin') {
with copySpec {
with binFiles
- from('../src/main/resources') {
- include 'bin/*.bat'
+ from('../src/main/resources/bin') {
+ include '*.bat'
filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
}
MavenFilteringHack.filter(it, expansions)
diff --git a/docs/reference/setup/bootstrap-checks.asciidoc b/docs/reference/setup/bootstrap-checks.asciidoc
index bfa43727d28..fea711a52f0 100644
--- a/docs/reference/setup/bootstrap-checks.asciidoc
+++ b/docs/reference/setup/bootstrap-checks.asciidoc
@@ -16,6 +16,7 @@ checks that fail appear as warnings in the Elasticsearch log. If
Elasticsearch is in production mode, any bootstrap checks that fail will
cause Elasticsearch to refuse to start.
+[float]
=== Development vs. production mode
By default, Elasticsearch binds and publishes to `localhost`. This is
@@ -45,7 +46,7 @@ check, you must configure the <>.
File descriptors are a Unix construct for tracking open "files". In Unix
though, https://en.wikipedia.org/wiki/Everything_is_a_file[everything is
a file]. For example, "files" could be a physical file, a virtual file
-(e.g., ``/proc/loadavg`), or network sockets. Elasticsearch requires
+(e.g., `/proc/loadavg`), or network sockets. Elasticsearch requires
lots of file descriptors (e.g., every shard is composed of multiple
segments and other files, plus connections to other nodes, etc.). This
bootstrap check is enforced on OS X and Linux. To pass the file
@@ -85,14 +86,15 @@ equal to a quorum of master-eligible nodes, it is not possible for the
cluster to suffer from split brain because during a network partition
there can be at most one side of the partition that contains a quorum of
master nodes. The minimum master nodes check enforces that you've set
-<>. To pass the minimum master nodes check, you
-must configure
+<>. To pass
+the minimum master nodes check, you must configure
<>.
NOTE: The minimum master nodes check does not enforce that you've
configured <>
correctly, only that you have it configured. Elasticsearch does log a
-warning message if it detects that <> is
+warning message if it detects that
+<> is
incorrectly configured based on the number of master-eligible nodes
visible in the cluster state. Future versions of Elasticsearch will
contain stricter enforcement of
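
The quorum rule this paragraph relies on is simple integer arithmetic: with three master-eligible nodes a quorum is two, with five it is three. A hypothetical helper (not part of Elasticsearch) that states the rule:

```java
// Hypothetical, for illustration only: the quorum formula behind
// discovery.zen.minimum_master_nodes as described in the docs above.
static int minimumMasterNodes(int masterEligibleNodes) {
    return masterEligibleNodes / 2 + 1; // 3 -> 2, 5 -> 3
}
```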
diff --git a/docs/reference/setup/sysconfig/configuring.asciidoc b/docs/reference/setup/sysconfig/configuring.asciidoc
index 0f8f0725103..84d0f832141 100644
--- a/docs/reference/setup/sysconfig/configuring.asciidoc
+++ b/docs/reference/setup/sysconfig/configuring.asciidoc
@@ -105,10 +105,10 @@ LimitMEMLOCK=infinity
==== Setting JVM system properties
The preferred method of setting Java Virtual Machine options (including
-system properties and JVM flags) is via the jvm.options configuration
-file. The default location of this file is config/jvm.options (when
+system properties and JVM flags) is via the `jvm.options` configuration
+file. The default location of this file is `config/jvm.options` (when
installing from the tar or zip distributions) and
-/etc/elasticsearch/jvm.options (when installing from the Debian or RPM
+`/etc/elasticsearch/jvm.options` (when installing from the Debian or RPM
packages). This file contains a line-delimited list of JVM arguments,
which must begin with `-`. You can add custom JVM flags to this file and
check this configuration into your version control system.
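
For readers who have not seen it, `jvm.options` is exactly such a line-delimited list; an illustrative fragment (the values are examples, not recommendations):

```
-Xms2g
-Xmx2g
```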
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java
index ccb8614d87f..bac598ba282 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/SuggestSearchTests.java
@@ -20,27 +20,24 @@
package org.elasticsearch.messy.tests;
-import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
-import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
-import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
-import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
-import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.endsWith;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.nullValue;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchRequestBuilder;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.mustache.MustachePlugin;
+import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.search.suggest.SuggestBuilder;
+import org.elasticsearch.search.suggest.SuggestionBuilder;
+import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
+import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
-import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -51,32 +48,12 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.ReduceSearchPhaseException;
-import org.elasticsearch.action.search.SearchPhaseExecutionException;
-import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.mustache.MustachePlugin;
-import org.elasticsearch.search.suggest.SortBy;
-import org.elasticsearch.search.suggest.Suggest;
-import org.elasticsearch.search.suggest.SuggestBuilder;
-import org.elasticsearch.search.suggest.SuggestionBuilder;
-import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder;
-import org.elasticsearch.search.suggest.phrase.Laplace;
-import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
-import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
-import org.elasticsearch.search.suggest.phrase.StupidBackoff;
-import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
-import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
+import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
+import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionPhraseCollateMatchExists;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize;
+import static org.hamcrest.Matchers.equalTo;
/**
* Integration tests for term and phrase suggestions. Many of these tests make many requests that vary only slightly from one another. Where
@@ -90,1015 +67,6 @@ public class SuggestSearchTests extends ESIntegTestCase {
return Collections.singleton(MustachePlugin.class);
}
- // see #3196
- public void testSuggestAcrossMultipleIndices() throws IOException {
- createIndex("test");
- ensureGreen();
-
- index("test", "type1", "1", "text", "abcd");
- index("test", "type1", "2", "text", "aacd");
- index("test", "type1", "3", "text", "abbd");
- index("test", "type1", "4", "text", "abcc");
- refresh();
-
- TermSuggestionBuilder termSuggest = termSuggestion("text")
- .suggestMode(TermSuggestionBuilder.SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("abcd");
- logger.info("--> run suggestions with one index");
- searchSuggest("test", termSuggest);
- createIndex("test_1");
- ensureGreen();
-
- index("test_1", "type1", "1", "text", "ab cd");
- index("test_1", "type1", "2", "text", "aa cd");
- index("test_1", "type1", "3", "text", "ab bd");
- index("test_1", "type1", "4", "text", "ab cc");
- refresh();
- termSuggest = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("ab cd")
- .minWordLength(1);
- logger.info("--> run suggestions with two indices");
- searchSuggest("test", termSuggest);
-
-
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("properties")
- .startObject("text").field("type", "text").field("analyzer", "keyword").endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(prepareCreate("test_2").addMapping("type1", mapping));
- ensureGreen();
-
- index("test_2", "type1", "1", "text", "ab cd");
- index("test_2", "type1", "2", "text", "aa cd");
- index("test_2", "type1", "3", "text", "ab bd");
- index("test_2", "type1", "4", "text", "ab cc");
- index("test_2", "type1", "1", "text", "abcd");
- index("test_2", "type1", "2", "text", "aacd");
- index("test_2", "type1", "3", "text", "abbd");
- index("test_2", "type1", "4", "text", "abcc");
- refresh();
-
- termSuggest = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("ab cd")
- .minWordLength(1);
- logger.info("--> run suggestions with three indices");
- try {
- searchSuggest("test", termSuggest);
- fail(" can not suggest across multiple indices with different analysis chains");
- } catch (ReduceSearchPhaseException ex) {
- assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
- assertThat(ex.getCause().getMessage(),
- anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
- endsWith("Suggest entries have different sizes actual [2] expected [1]")));
- } catch (IllegalStateException ex) {
- assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
- endsWith("Suggest entries have different sizes actual [2] expected [1]")));
- }
-
-
- termSuggest = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("ABCD")
- .minWordLength(1);
- logger.info("--> run suggestions with four indices");
- try {
- searchSuggest("test", termSuggest);
- fail(" can not suggest across multiple indices with different analysis chains");
- } catch (ReduceSearchPhaseException ex) {
- assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
- assertThat(ex.getCause().getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
- endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
- } catch (IllegalStateException ex) {
- assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
- endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
- }
- }
-
- // see #3037
- public void testSuggestModes() throws IOException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(SETTING_NUMBER_OF_SHARDS, 1)
- .put(SETTING_NUMBER_OF_REPLICAS, 0)
- .put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
- .put("index.analysis.filter.shingler.type", "shingle")
- .put("index.analysis.filter.shingler.min_shingle_size", 2)
- .put("index.analysis.filter.shingler.max_shingle_size", 3));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("properties")
- .startObject("name")
- .field("type", "text")
- .startObject("fields")
- .startObject("shingled")
- .field("type", "text")
- .field("analyzer", "biword")
- .field("search_analyzer", "standard")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
-
- index("test", "type1", "1", "name", "I like iced tea");
- index("test", "type1", "2", "name", "I like tea.");
- index("test", "type1", "3", "name", "I like ice cream.");
- refresh();
-
- DirectCandidateGeneratorBuilder generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2);
- PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
- .addCandidateGenerator(generator)
- .gramSize(3);
- Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
- assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea");
-
- generator.suggestMode(null);
- searchSuggest = searchSuggest( "ice tea", "did_you_mean", phraseSuggestion);
- assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean");
- }
-
- /**
- * Creates a new {@link DirectCandidateGeneratorBuilder}
- *
- * @param field
- * the field this candidate generator operates on.
- */
- private DirectCandidateGeneratorBuilder candidateGenerator(String field) {
- return new DirectCandidateGeneratorBuilder(field);
- }
-
- // see #2729
- public void testSizeOneShard() throws Exception {
- prepareCreate("test").setSettings(
- SETTING_NUMBER_OF_SHARDS, 1,
- SETTING_NUMBER_OF_REPLICAS, 0).get();
- ensureGreen();
-
- for (int i = 0; i < 15; i++) {
- index("test", "type1", Integer.toString(i), "text", "abc" + i);
- }
- refresh();
-
- SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get();
- assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
-
- TermSuggestionBuilder termSuggestion = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("abcd")
- .size(10);
- Suggest suggest = searchSuggest("test", termSuggestion);
- assertSuggestion(suggest, 0, "test", 10, "abc0");
-
- termSuggestion.text("abcd").shardSize(5);
- suggest = searchSuggest("test", termSuggestion);
- assertSuggestion(suggest, 0, "test", 5, "abc0");
- }
-
- public void testUnmappedField() throws IOException, InterruptedException, ExecutionException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
- .put("index.analysis.filter.shingler.type", "shingle")
- .put("index.analysis.filter.shingler.min_shingle_size", 2)
- .put("index.analysis.filter.shingler.max_shingle_size", 3));
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("properties")
- .startObject("name")
- .field("type", "text")
- .startObject("fields")
- .startObject("shingled")
- .field("type", "text")
- .field("analyzer", "biword")
- .field("search_analyzer", "standard")
- .endObject()
- .endObject()
- .endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- indexRandom(true, client().prepareIndex("test", "type1").setSource("name", "I like iced tea"),
- client().prepareIndex("test", "type1").setSource("name", "I like tea."),
- client().prepareIndex("test", "type1").setSource("name", "I like ice cream."));
- refresh();
-
- PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
- .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
- .gramSize(3);
- Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
- assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea");
-
- phraseSuggestion = phraseSuggestion("nosuchField")
- .addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
- .gramSize(3);
- {
- SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
- searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
- assertThrows(searchBuilder, SearchPhaseExecutionException.class);
- }
- {
- SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
- searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
- assertThrows(searchBuilder, SearchPhaseExecutionException.class);
- }
- }
-
- public void testSimple() throws Exception {
- createIndex("test");
- ensureGreen();
-
- index("test", "type1", "1", "text", "abcd");
- index("test", "type1", "2", "text", "aacd");
- index("test", "type1", "3", "text", "abbd");
- index("test", "type1", "4", "text", "abcc");
- refresh();
-
- SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get();
- assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
-
- TermSuggestionBuilder termSuggest = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("abcd");
- Suggest suggest = searchSuggest("test", termSuggest);
- assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc");
- assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
-
- suggest = searchSuggest("test", termSuggest);
- assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc");
- assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
- }
-
- public void testEmpty() throws Exception {
- createIndex("test");
- ensureGreen();
-
- index("test", "type1", "1", "text", "bar");
- refresh();
-
- TermSuggestionBuilder termSuggest = termSuggestion("text")
- .suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
- .text("abcd");
- Suggest suggest = searchSuggest("test", termSuggest);
- assertSuggestionSize(suggest, 0, 0, "test");
- assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
-
- suggest = searchSuggest("test", termSuggest);
- assertSuggestionSize(suggest, 0, 0, "test");
- assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
- }
-
- public void testWithMultipleCommands() throws Exception {
- createIndex("test");
- ensureGreen();
-
- index("test", "typ1", "1", "field1", "prefix_abcd", "field2", "prefix_efgh");
- index("test", "typ1", "2", "field1", "prefix_aacd", "field2", "prefix_eeeh");
- index("test", "typ1", "3", "field1", "prefix_abbd", "field2", "prefix_efff");
- index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg");
- refresh();
-
- Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
- suggestions.put("size1", termSuggestion("field1")
- .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0)
- .suggestMode(SuggestMode.ALWAYS));
- suggestions.put("field2", termSuggestion("field2")
- .text("prefix_eeeh prefix_efgh")
- .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- suggestions.put("accuracy", termSuggestion("field2")
- .text("prefix_efgh").accuracy(1f)
- .maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- Suggest suggest = searchSuggest(null, 0, suggestions);
- assertSuggestion(suggest, 0, "size1", "prefix_aacd");
- assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh"));
- assertSuggestion(suggest, 0, "field2", "prefix_efgh");
- assertThat(suggest.getSuggestion("field2").getEntries().get(1).getText().string(), equalTo("prefix_efgh"));
- assertSuggestion(suggest, 1, "field2", "prefix_eeeh", "prefix_efff", "prefix_eggg");
- assertSuggestionSize(suggest, 0, 0, "accuracy");
- }
-
- public void testSizeAndSort() throws Exception {
- createIndex("test");
- ensureGreen();
-
- Map<String, Integer> termsAndDocCount = new HashMap<>();
- termsAndDocCount.put("prefix_aaad", 20);
- termsAndDocCount.put("prefix_abbb", 18);
- termsAndDocCount.put("prefix_aaca", 16);
- termsAndDocCount.put("prefix_abba", 14);
- termsAndDocCount.put("prefix_accc", 12);
- termsAndDocCount.put("prefix_addd", 10);
- termsAndDocCount.put("prefix_abaa", 8);
- termsAndDocCount.put("prefix_dbca", 6);
- termsAndDocCount.put("prefix_cbad", 4);
- termsAndDocCount.put("prefix_aacd", 1);
- termsAndDocCount.put("prefix_abcc", 1);
- termsAndDocCount.put("prefix_accd", 1);
-
- for (Map.Entry<String, Integer> entry : termsAndDocCount.entrySet()) {
- for (int i = 0; i < entry.getValue(); i++) {
- index("test", "type1", entry.getKey() + i, "field1", entry.getKey());
- }
- }
- refresh();
-
- Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
- suggestions.put("size3SortScoreFirst", termSuggestion("field1")
- .size(3).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- suggestions.put("size10SortScoreFirst", termSuggestion("field1")
- .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS).shardSize(50));
- suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion("field1")
- .maxEdits(1)
- .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- suggestions.put("size10SortFrequencyFirst", termSuggestion("field1")
- .size(10).sort(SortBy.FREQUENCY).shardSize(1000)
- .minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- Suggest suggest = searchSuggest("prefix_abcd", 0, suggestions);
-
- // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies.
- assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd");
- assertSuggestion(suggest, 0, "size10SortScoreFirst", 10, "prefix_aacd", "prefix_abcc", "prefix_accd" /*, "prefix_aaad" */);
- assertSuggestion(suggest, 0, "size3SortScoreFirstMaxEdits1", "prefix_aacd", "prefix_abcc", "prefix_accd");
- assertSuggestion(suggest, 0, "size10SortFrequencyFirst", "prefix_aaad", "prefix_abbb", "prefix_aaca", "prefix_abba",
- "prefix_accc", "prefix_addd", "prefix_abaa", "prefix_dbca", "prefix_cbad", "prefix_aacd");
-
- // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_abcc"));
- // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_accd"));
- }
-
- // see #2817
- public void testStopwordsOnlyPhraseSuggest() throws IOException {
- assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings(
- Settings.builder()
- .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.stopwd.filter", "stop")
- ));
- ensureGreen();
- index("test", "typ1", "1", "body", "this is a test");
- refresh();
-
- Suggest searchSuggest = searchSuggest( "a an the", "simple_phrase",
- phraseSuggestion("body").gramSize(1)
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
- .size(1));
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
- }
-
- public void testPrefixLength() throws IOException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(SETTING_NUMBER_OF_SHARDS, 1)
- .put("index.analysis.analyzer.reverse.tokenizer", "standard")
- .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
- .put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", false)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
- .startObject("properties")
- .startObject("body").field("type", "text").field("analyzer", "body").endObject()
- .startObject("body_reverse").field("type", "text").field("analyzer", "reverse").endObject()
- .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- index("test", "type1", "1", "body", "hello world");
- index("test", "type1", "2", "body", "hello world");
- index("test", "type1", "3", "body", "hello words");
- refresh();
-
- Suggest searchSuggest = searchSuggest( "hello word", "simple_phrase",
- phraseSuggestion("body")
- .addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always"))
- .size(1).confidence(1.0f));
- assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words");
-
- searchSuggest = searchSuggest( "hello word", "simple_phrase",
- phraseSuggestion("body")
- .addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always"))
- .size(1).confidence(1.0f));
- assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world");
- }
-
- @Nightly
- public void testMarvelHerosPhraseSuggest() throws IOException, URISyntaxException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.reverse.tokenizer", "standard")
- .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
- .put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", false)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
- .startObject("_all")
- .field("store", true)
- .field("term_vector", "with_positions_offsets")
- .endObject()
- .startObject("properties")
- .startObject("body").
- field("type", "text").
- field("analyzer", "body")
- .endObject()
- .startObject("body_reverse").
- field("type", "text").
- field("analyzer", "reverse")
- .endObject()
- .startObject("bigram").
- field("type", "text").
- field("analyzer", "bigram")
- .endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- for (String line : readMarvelHeroNames()) {
- index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
- }
- refresh();
-
- PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("bigram").gramSize(2).analyzer("body")
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
- .size(1);
- Suggest searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");
- assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("american ame"));
-
- phraseSuggest.realWordErrorLikelihood(0.95f);
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
- // Check the "text" field this one time.
- assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel"));
-
- // Ask for highlighting
- phraseSuggest.highlight("", "");
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
- assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(), equalTo("xorr the god jewel"));
-
- // pass in a correct phrase
- phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f);
- searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- // pass in a correct phrase - set confidence to 2
- phraseSuggest.confidence(2f);
- searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
-
- // pass in a correct phrase - set confidence to 0.99
- phraseSuggest.confidence(0.99f);
- searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- //test reverse suggestions with pre & post filter
- phraseSuggest
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
- .addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse").postFilter("reverse"));
- searchSuggest = searchSuggest( "xor the yod-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- // set all mass to trigrams (not indexed)
- phraseSuggest.clearCandidateGenerators()
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
- .smoothingModel(new LinearInterpolation(1,0,0));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
-
- // set all mass to bigrams
- phraseSuggest.smoothingModel(new LinearInterpolation(0,1,0));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- // distribute mass
- phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");
-
- // try all smoothing methods
- phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- phraseSuggest.smoothingModel(new Laplace(0.2));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- phraseSuggest.smoothingModel(new StupidBackoff(0.1));
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase",phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
-
- // check tokenLimit
- phraseSuggest.smoothingModel(null).tokenLimit(4);
- searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
-
- phraseSuggest.tokenLimit(15).smoothingModel(new StupidBackoff(0.1));
- searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", "simple_phrase", phraseSuggest);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel");
- // Check the name this time because we're repeating it which is funky
- assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel"));
- }
-
- private List<String> readMarvelHeroNames() throws IOException, URISyntaxException {
- return Files.readAllLines(PathUtils.get(Suggest.class.getResource("/config/names.txt").toURI()), StandardCharsets.UTF_8);
- }
-
- public void testSizeParam() throws IOException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(SETTING_NUMBER_OF_SHARDS, 1)
- .put("index.analysis.analyzer.reverse.tokenizer", "standard")
- .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
- .put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", false)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type1")
- .startObject("_all")
- .field("store", true)
- .field("term_vector", "with_positions_offsets")
- .endObject()
- .startObject("properties")
- .startObject("body")
- .field("type", "text")
- .field("analyzer", "body")
- .endObject()
- .startObject("body_reverse")
- .field("type", "text")
- .field("analyzer", "reverse")
- .endObject()
- .startObject("bigram")
- .field("type", "text")
- .field("analyzer", "bigram")
- .endObject()
- .endObject()
- .endObject()
- .endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- String line = "xorr the god jewel";
- index("test", "type1", "1", "body", line, "body_reverse", line, "bigram", line);
- line = "I got it this time";
- index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line);
- refresh();
-
- PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
- .realWordErrorLikelihood(0.95f)
- .gramSize(2)
- .analyzer("body")
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f))
- .smoothingModel(new StupidBackoff(0.1))
- .maxErrors(1.0f)
- .size(5);
- Suggest searchSuggest = searchSuggest("Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
-
- // we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2
- phraseSuggestion.clearCandidateGenerators()
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2).accuracy(0.1f));
- searchSuggest = searchSuggest( "Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
- assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
- }
-
- @Nightly
- public void testPhraseBoundaryCases() throws IOException, URISyntaxException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
- .put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
- .put("index.analysis.analyzer.ngram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.ngram.filter", "my_shingle2", "lowercase")
- .put("index.analysis.analyzer.myDefAnalyzer.tokenizer", "standard")
- .putArray("index.analysis.analyzer.myDefAnalyzer.filter", "shingle", "lowercase")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", false)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
- .put("index.analysis.filter.my_shingle2.type", "shingle")
- .put("index.analysis.filter.my_shingle2.output_unigrams", true)
- .put("index.analysis.filter.my_shingle2.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle2.max_shingle_size", 2));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject().startObject("type1")
- .startObject("_all").field("store", true).field("term_vector", "with_positions_offsets").endObject()
- .startObject("properties")
- .startObject("body").field("type", "text").field("analyzer", "body").endObject()
- .startObject("bigram").field("type", "text").field("analyzer", "bigram").endObject()
- .startObject("ngram").field("type", "text").field("analyzer", "ngram").endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- for (String line : readMarvelHeroNames()) {
- index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
- }
- refresh();
-
- NumShards numShards = getNumShards("test");
-
- // Lets make sure some things throw exceptions
- PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
- .analyzer("body")
- .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
- .realWordErrorLikelihood(0.95f)
- .maxErrors(0.5f)
- .size(1);
- Map<String, SuggestionBuilder<?>> suggestion = new HashMap<>();
- suggestion.put("simple_phrase", phraseSuggestion);
- try {
- searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
- fail("field does not exists");
- } catch (SearchPhaseExecutionException e) {}
-
- phraseSuggestion.clearCandidateGenerators().analyzer(null);
- try {
- searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
- fail("analyzer does only produce ngrams");
- } catch (SearchPhaseExecutionException e) {
- }
-
- phraseSuggestion.analyzer("bigram");
- try {
- searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
- fail("analyzer does only produce ngrams");
- } catch (SearchPhaseExecutionException e) {
- }
-
- // Now we'll make sure some things don't
- phraseSuggestion.forceUnigrams(false);
- searchSuggest( "Xor the Got-Jewel", 0, suggestion);
-
- // Field doesn't produce unigrams but the analyzer does
- phraseSuggestion.forceUnigrams(true).analyzer("ngram");
- searchSuggest( "Xor the Got-Jewel", 0, suggestion);
-
- phraseSuggestion = phraseSuggestion("ngram")
- .analyzer("myDefAnalyzer")
- .forceUnigrams(true)
- .realWordErrorLikelihood(0.95f)
- .maxErrors(0.5f)
- .size(1)
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"));
- Suggest suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion);
-
- // "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by
- // earlier term (xorn):
- assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel");
-
- phraseSuggestion.analyzer(null);
- suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion);
-
- // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the
- // probability that the term is not in the dictionary but is NOT a misspelling is relatively high compared to the others,
- // which share no n-gram with the other terms in the phrase :) you can tune this with realWordErrorLikelihood
- assertSuggestion(suggest, 0, "simple_phrase", "xorr the god jewel");
- }
-
- public void testDifferentShardSize() throws Exception {
- createIndex("test");
- ensureGreen();
- indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "foobar1").setRouting("1"),
- client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"),
- client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3"));
-
- Suggest suggest = searchSuggest( "foobar", "simple",
- termSuggestion("field1")
- .size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
- ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple");
- }
-
- // see #3469
- public void testShardFailures() throws IOException, InterruptedException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
- .put("index.analysis.filter.shingler.type", "shingle")
- .put("index.analysis.filter.shingler.min_shingle_size", 2)
- .put("index.analysis.filter.shingler.max_shingle_size", 5)
- .put("index.analysis.filter.shingler.output_unigrams", true));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2")
- .startObject("properties")
- .startObject("name")
- .field("type", "text")
- .field("analyzer", "suggest")
- .endObject()
- .endObject()
- .endObject().endObject();
- assertAcked(builder.addMapping("type2", mapping));
- ensureGreen();
-
- index("test", "type2", "1", "foo", "bar");
- index("test", "type2", "2", "foo", "bar");
- index("test", "type2", "3", "foo", "bar");
- index("test", "type2", "4", "foo", "bar");
- index("test", "type2", "5", "foo", "bar");
- index("test", "type2", "1", "name", "Just testing the suggestions api");
- index("test", "type2", "2", "name", "An other title about equal length");
- // Note that the last document has to have about the same length as the other or cutoff rechecking will remove the useful suggestion.
- refresh();
-
- // When searching on a shard with a non-existent mapping, we should fail
- SearchRequestBuilder request = client().prepareSearch().setSize(0)
- .suggest(
- new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
- phraseSuggestion("fielddoesnotexist").maxErrors(5.0f)));
- assertThrows(request, SearchPhaseExecutionException.class);
-
- // When searching on a shard which does not yet hold any document of an existing type, we should not fail
- SearchResponse searchResponse = client().prepareSearch().setSize(0)
- .suggest(
- new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
- phraseSuggestion("name").maxErrors(5.0f)))
- .get();
- ElasticsearchAssertions.assertNoFailures(searchResponse);
- ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
- }
-
- // see #3469
- public void testEmptyShards() throws IOException, InterruptedException {
- XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().
- startObject().
- startObject("type1").
- startObject("properties").
- startObject("name").
- field("type", "text").
- field("analyzer", "suggest").
- endObject().
- endObject().
- endObject().
- endObject();
- assertAcked(prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
- .put("index.analysis.filter.shingler.type", "shingle")
- .put("index.analysis.filter.shingler.min_shingle_size", 2)
- .put("index.analysis.filter.shingler.max_shingle_size", 5)
- .put("index.analysis.filter.shingler.output_unigrams", true)).addMapping("type1", mappingBuilder));
- ensureGreen();
-
- index("test", "type2", "1", "foo", "bar");
- index("test", "type2", "2", "foo", "bar");
- index("test", "type1", "1", "name", "Just testing the suggestions api");
- index("test", "type1", "2", "name", "An other title about equal length");
- refresh();
-
- SearchResponse searchResponse = client().prepareSearch()
- .setSize(0)
- .suggest(
- new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
- phraseSuggestion("name").maxErrors(5.0f)))
- .get();
-
- assertNoFailures(searchResponse);
- assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
- }
-
- /**
- * Searching for a rare phrase shouldn't provide any suggestions if confidence > 1. This was possible before we rechecked the cutoff
- * score during the reduce phase. Failures don't occur every time - maybe two out of five tries - but we don't repeat it to save time.
- */
- public void testSearchForRarePhrase() throws IOException {
- // If there isn't enough chaff per shard then shards can become unbalanced, making the cutoff recheck this test exercises do more harm than good.
- int chafPerShard = 100;
-
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", true)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type1")
- .startObject("_all")
- .field("store", true)
- .field("term_vector", "with_positions_offsets")
- .endObject()
- .startObject("properties")
- .startObject("body")
- .field("type", "text")
- .field("analyzer", "body")
- .endObject()
- .endObject()
- .endObject()
- .endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- NumShards test = getNumShards("test");
-
- List<String> phrases = new ArrayList<>();
- Collections.addAll(phrases, "nobel prize", "noble gases", "somethingelse prize", "pride and joy", "notes are fun");
- for (int i = 0; i < 8; i++) {
- phrases.add("noble somethingelse" + i);
- }
- for (int i = 0; i < test.numPrimaries * chafPerShard; i++) {
- phrases.add("chaff" + i);
- }
- for (String phrase: phrases) {
- index("test", "type1", phrase, "body", phrase);
- }
- refresh();
-
- Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion("body")
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
- .confidence(2f)
- .maxErrors(5f)
- .size(1));
- assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
-
- searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion("body")
- .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
- .confidence(2f)
- .maxErrors(5f)
- .size(1));
- assertSuggestion(searchSuggest, 0, 0, "simple_phrase", "nobel prize");
- }
-
- @Nightly
- public void testSuggestWithManyCandidates() throws InterruptedException, ExecutionException, IOException {
- CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
- .put(indexSettings())
- .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
- .put("index.analysis.analyzer.text.tokenizer", "standard")
- .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
- .put("index.analysis.filter.my_shingle.type", "shingle")
- .put("index.analysis.filter.my_shingle.output_unigrams", true)
- .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
- .put("index.analysis.filter.my_shingle.max_shingle_size", 3));
-
- XContentBuilder mapping = XContentFactory.jsonBuilder()
- .startObject()
- .startObject("type1")
- .startObject("properties")
- .startObject("title")
- .field("type", "text")
- .field("analyzer", "text")
- .endObject()
- .endObject()
- .endObject()
- .endObject();
- assertAcked(builder.addMapping("type1", mapping));
- ensureGreen();
-
- List<String> titles = new ArrayList<>();
-
- // We're going to be searching for:
- // united states house of representatives elections in washington 2006
- // But we need to make sure we generate a ton of suggestions so we add a bunch of candidates.
- // Many of these candidates are drawn from page names on English Wikipedia.
-
- // Tons of different options very near the exact query term
- titles.add("United States House of Representatives Elections in Washington 1789");
- for (int year = 1790; year < 2014; year+= 2) {
- titles.add("United States House of Representatives Elections in Washington " + year);
- }
- // Six of these are near enough to be viable suggestions, just not the top one
-
- // But we can't stop there! Titles that are just a year are pretty common so let's just add one per year
- // since 0. Why not?
- for (int year = 0; year < 2015; year++) {
- titles.add(Integer.toString(year));
- }
- // That ought to provide more, less-good candidates for the last term
-
- // Now add or remove plurals from every term we can
- titles.add("State");
- titles.add("Houses of Parliament");
- titles.add("Representative Government");
- titles.add("Election");
-
- // Now some possessive forms
- titles.add("Washington's Birthday");
-
- // And some conjugation
- titles.add("Unified Modeling Language");
- titles.add("Unite Against Fascism");
- titles.add("Stated Income Tax");
- titles.add("Media organizations housed within colleges");
-
- // And other stuff
- titles.add("Untied shoelaces");
- titles.add("Unit circle");
- titles.add("Untitled");
- titles.add("Unicef");
- titles.add("Unrated");
- titles.add("UniRed");
- titles.add("Jalan Uniten–Dengkil"); // Highway in Malaysia
- titles.add("UNITAS");
- titles.add("UNITER");
- titles.add("Un-Led-Ed");
- titles.add("STATS LLC");
- titles.add("Staples");
- titles.add("Skates");
- titles.add("Statues of the Liberators");
- titles.add("Staten Island");
- titles.add("Statens Museum for Kunst");
- titles.add("Hause"); // The last name or the German word, whichever.
- titles.add("Hose");
- titles.add("Hoses");
- titles.add("Howse Peak");
- titles.add("The Hoose-Gow");
- titles.add("Hooser");
- titles.add("Electron");
- titles.add("Electors");
- titles.add("Evictions");
- titles.add("Coronal mass ejection");
- titles.add("Wasington"); // A film?
- titles.add("Warrington"); // A town in England
- titles.add("Waddington"); // Lots of places have this name
- titles.add("Watlington"); // Ditto
- titles.add("Waplington"); // Yup, also a town
- titles.add("Washing of the Spears"); // Book
-
- for (char c = 'A'; c <= 'Z'; c++) {
- // Can't forget lists, glorious lists!
- titles.add("List of former members of the United States House of Representatives (" + c + ")");
-
- // Lots of people are named Washington <letter>. Lastname
- titles.add("Washington " + c + ". Lastname");
-
- // Let's just add some more to be evil
- titles.add("United " + c);
- titles.add("States " + c);
- titles.add("House " + c);
- titles.add("Elections " + c);
- titles.add("2006 " + c);
- titles.add(c + " United");
- titles.add(c + " States");
- titles.add(c + " House");
- titles.add(c + " Elections");
- titles.add(c + " 2006");
- }
-
- List<IndexRequestBuilder> builders = new ArrayList<>();
- for (String title: titles) {
- builders.add(client().prepareIndex("test", "type1").setSource("title", title));
- }
- indexRandom(true, builders);
-
- PhraseSuggestionBuilder suggest = phraseSuggestion("title")
- .addCandidateGenerator(candidateGenerator("title")
- .suggestMode("always")
- .maxTermFreq(.99f)
- .size(1000) // Setting an absurdly high size helps generate a larger list of candidates for testing.
- .maxInspections(1000) // This too
- )
- .confidence(0f)
- .maxErrors(2f)
- .shardSize(30000)
- .size(30000);
- Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
- assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
- assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options
-
- suggest.size(1);
- long start = System.currentTimeMillis();
- searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
- long total = System.currentTimeMillis() - start;
- assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
- // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
- }
-
public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionException, IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
@@ -1141,7 +109,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
// suggest without collate
PhraseSuggestionBuilder suggest = phraseSuggestion("title")
- .addCandidateGenerator(candidateGenerator("title")
+ .addCandidateGenerator(new DirectCandidateGeneratorBuilder("title")
.suggestMode("always")
.maxTermFreq(.99f)
.size(10)
@@ -1201,7 +169,8 @@ public class SuggestSearchTests extends ESIntegTestCase {
.string();
PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter);
- searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredFilterSuggest);
+ searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title",
+ filteredFilterSuggest);
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate suggest with bad query
@@ -1245,12 +214,15 @@ public class SuggestSearchTests extends ESIntegTestCase {
params.put("query_field", "title");
PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateQuery(collateWithParams).collateParams(params);
- searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParams);
+ searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title",
+ phraseSuggestWithParams);
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate query request with prune set to true
- PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params).collatePrune(true);
- searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParamsAndReturn);
+ PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params)
+ .collatePrune(true);
+ searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title",
+ phraseSuggestWithParamsAndReturn);
assertSuggestionSize(searchSuggest, 0, 10, "title");
assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2);
}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
index ac8e7b89b79..0923cffc59d 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java
@@ -43,6 +43,11 @@ public final class Definition {
"java.lang.txt",
"java.math.txt",
"java.text.txt",
+ "java.time.txt",
+ "java.time.chrono.txt",
+ "java.time.format.txt",
+ "java.time.temporal.txt",
+ "java.time.zone.txt",
"java.util.txt",
"java.util.function.txt",
"java.util.stream.txt"));
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.chrono.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.chrono.txt
new file mode 100644
index 00000000000..8dd0478605e
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.chrono.txt
@@ -0,0 +1,336 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Painless definition file. This defines the hierarchy of classes,
+# what methods and fields they have, etc.
+#
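+
+# As a rough illustration of the entry format used in these files (Foo
+# and Bar are hypothetical names, not real whitelist entries):
+#
+#   class Foo -> java.example.Foo extends Bar,Object {
+#     Foo INSTANCE   <- whitelists a field
+#     Foo (int)      <- whitelists a constructor
+#     int size()     <- whitelists a method
+#   }
+#
+# The name left of "->" is the Painless type name, the fully qualified
+# name on the right is the Java class it maps to, and the names after
+# "extends" are its whitelisted supertypes.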
+
+#### Interfaces
+
+class ChronoLocalDate -> java.time.chrono.ChronoLocalDate extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster {
+ ChronoLocalDateTime atTime(LocalTime)
+ boolean equals(Object)
+ String format(DateTimeFormatter)
+ ChronoLocalDate from(TemporalAccessor)
+ Chronology getChronology()
+ Era getEra()
+ int hashCode()
+ boolean isAfter(ChronoLocalDate)
+ boolean isBefore(ChronoLocalDate)
+ boolean isEqual(ChronoLocalDate)
+ boolean isLeapYear()
+ int lengthOfMonth()
+ int lengthOfYear()
+ ChronoLocalDate minus(TemporalAmount)
+ ChronoLocalDate minus(long,TemporalUnit)
+ ChronoLocalDate plus(TemporalAmount)
+ ChronoLocalDate plus(long,TemporalUnit)
+ Comparator timeLineOrder()
+ long toEpochDay()
+ String toString()
+ ChronoPeriod until(ChronoLocalDate)
+ ChronoLocalDate with(TemporalAdjuster)
+ ChronoLocalDate with(TemporalField,long)
+}
+
+class ChronoLocalDateTime -> java.time.chrono.ChronoLocalDateTime extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster {
+ ChronoZonedDateTime atZone(ZoneId)
+ boolean equals(Object)
+ String format(DateTimeFormatter)
+ ChronoLocalDateTime from(TemporalAccessor)
+ int hashCode()
+ boolean isAfter(ChronoLocalDateTime)
+ boolean isBefore(ChronoLocalDateTime)
+ boolean isEqual(ChronoLocalDateTime)
+ Chronology getChronology()
+ ChronoLocalDateTime minus(TemporalAmount)
+ ChronoLocalDateTime minus(long,TemporalUnit)
+ ChronoLocalDateTime plus(TemporalAmount)
+ ChronoLocalDateTime plus(long,TemporalUnit)
+ Comparator timeLineOrder()
+ long toEpochSecond(ZoneOffset)
+ Instant toInstant(ZoneOffset)
+ ChronoLocalDate toLocalDate()
+ LocalTime toLocalTime()
+ String toString()
+ ChronoLocalDateTime with(TemporalAdjuster)
+ ChronoLocalDateTime with(TemporalField,long)
+}
+
+class Chronology -> java.time.chrono.Chronology extends Comparable {
+ ChronoLocalDate date(TemporalAccessor)
+ ChronoLocalDate date(Era,int,int,int)
+ ChronoLocalDate date(int,int,int)
+ ChronoLocalDate dateEpochDay(long)
+ ChronoLocalDate dateNow()
+ ChronoLocalDate dateNow(ZoneId)
+ ChronoLocalDate dateYearDay(Era,int,int)
+ ChronoLocalDate dateYearDay(int,int)
+ boolean equals(Object)
+ Era eraOf(int)
+ List eras()
+ Chronology from(TemporalAccessor)
+ Set getAvailableChronologies()
+ String getDisplayName(TextStyle,Locale)
+ String getId()
+ String getCalendarType()
+ int hashCode()
+ boolean isLeapYear(long)
+ ChronoLocalDateTime localDateTime(TemporalAccessor)
+ Chronology of(String)
+ Chronology ofLocale(Locale)
+ ChronoPeriod period(int,int,int)
+ int prolepticYear(Era,int)
+ ValueRange range(ChronoField)
+ ChronoLocalDate resolveDate(Map,ResolverStyle)
+ String toString()
+ ChronoZonedDateTime zonedDateTime(TemporalAccessor)
+ ChronoZonedDateTime zonedDateTime(Instant,ZoneId)
+}
+
+class ChronoPeriod -> java.time.chrono.ChronoPeriod extends TemporalAmount {
+ ChronoPeriod between(ChronoLocalDate,ChronoLocalDate)
+ boolean equals(Object)
+ Chronology getChronology()
+ List getUnits()
+ int hashCode()
+ boolean isNegative()
+ boolean isZero()
+ ChronoPeriod minus(TemporalAmount)
+ ChronoPeriod multipliedBy(int)
+ ChronoPeriod negated()
+ ChronoPeriod normalized()
+ ChronoPeriod plus(TemporalAmount)
+ String toString()
+}
+
+class ChronoZonedDateTime -> java.time.chrono.ChronoZonedDateTime extends Comparable,Temporal,TemporalAccessor {
+ boolean equals(Object)
+ String format(DateTimeFormatter)
+ ChronoZonedDateTime from(TemporalAccessor)
+ Chronology getChronology()
+ ZoneOffset getOffset()
+ ZoneId getZone()
+ int hashCode()
+ boolean isBefore(ChronoZonedDateTime)
+ boolean isAfter(ChronoZonedDateTime)
+ boolean isEqual(ChronoZonedDateTime)
+ ChronoZonedDateTime minus(TemporalAmount)
+ ChronoZonedDateTime minus(long,TemporalUnit)
+ ChronoZonedDateTime plus(TemporalAmount)
+ ChronoZonedDateTime plus(long,TemporalUnit)
+ Comparator timeLineOrder()
+ Instant toInstant()
+ long toEpochSecond()
+ ChronoLocalDate toLocalDate()
+ LocalTime toLocalTime()
+ ChronoLocalDateTime toLocalDateTime()
+ String toString()
+ ChronoZonedDateTime with(TemporalAdjuster)
+ ChronoZonedDateTime with(TemporalField,long)
+ ChronoZonedDateTime withEarlierOffsetAtOverlap()
+ ChronoZonedDateTime withLaterOffsetAtOverlap()
+ ChronoZonedDateTime withZoneSameLocal(ZoneId)
+ ChronoZonedDateTime withZoneSameInstant(ZoneId)
+}
+
+class Era -> java.time.chrono.Era extends TemporalAccessor,TemporalAdjuster {
+ String getDisplayName(TextStyle,Locale)
+ int getValue()
+}
+
+#### Classes
+
+class AbstractChronology -> java.time.chrono.AbstractChronology extends Comparable,Chronology,Object {
+}
+
+class HijrahChronology -> java.time.chrono.HijrahChronology extends AbstractChronology,Comparable,Chronology,Object {
+ HijrahChronology INSTANCE
+ HijrahDate date(TemporalAccessor)
+ HijrahDate date(int,int,int)
+ HijrahDate date(Era,int,int,int)
+ HijrahDate dateEpochDay(long)
+ HijrahDate dateNow()
+ HijrahDate dateNow(ZoneId)
+ HijrahDate dateYearDay(int,int)
+ HijrahDate dateYearDay(Era,int,int)
+ HijrahEra eraOf(int)
+}
+
+class HijrahDate -> java.time.chrono.HijrahDate extends Comparable,ChronoLocalDate,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ HijrahDate now()
+ HijrahDate now(ZoneId)
+ HijrahDate of(int,int,int)
+ HijrahDate from(TemporalAccessor)
+ HijrahChronology getChronology()
+ HijrahEra getEra()
+ HijrahDate with(TemporalField,long)
+ HijrahDate with(TemporalAdjuster)
+ HijrahDate withVariant(HijrahChronology)
+ HijrahDate plus(TemporalAmount)
+ HijrahDate minus(TemporalAmount)
+ HijrahDate plus(long,TemporalUnit)
+ HijrahDate minus(long,TemporalUnit)
+}
+
+class IsoChronology -> java.time.chrono.IsoChronology extends AbstractChronology,Comparable,Chronology,Object {
+ IsoChronology INSTANCE
+ LocalDate date(TemporalAccessor)
+ LocalDate date(int,int,int)
+ LocalDate date(Era,int,int,int)
+ LocalDate dateEpochDay(long)
+ LocalDate dateNow()
+ LocalDate dateNow(ZoneId)
+ LocalDate dateYearDay(int,int)
+ LocalDate dateYearDay(Era,int,int)
+ IsoEra eraOf(int)
+}
+
+class JapaneseChronology -> java.time.chrono.JapaneseChronology extends AbstractChronology,Comparable,Chronology,Object {
+ JapaneseChronology INSTANCE
+ JapaneseDate date(TemporalAccessor)
+ JapaneseDate date(int,int,int)
+ JapaneseDate date(Era,int,int,int)
+ JapaneseDate dateEpochDay(long)
+ JapaneseDate dateNow()
+ JapaneseDate dateNow(ZoneId)
+ JapaneseDate dateYearDay(int,int)
+ JapaneseDate dateYearDay(Era,int,int)
+ JapaneseEra eraOf(int)
+}
+
+class JapaneseDate -> java.time.chrono.JapaneseDate extends Comparable,ChronoLocalDate,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ JapaneseDate now()
+ JapaneseDate now(ZoneId)
+ JapaneseDate of(int,int,int)
+ JapaneseDate from(TemporalAccessor)
+ JapaneseChronology getChronology()
+ JapaneseEra getEra()
+ JapaneseDate with(TemporalField,long)
+ JapaneseDate with(TemporalAdjuster)
+ JapaneseDate plus(TemporalAmount)
+ JapaneseDate minus(TemporalAmount)
+ JapaneseDate plus(long,TemporalUnit)
+ JapaneseDate minus(long,TemporalUnit)
+}
+
+class JapaneseEra -> java.time.chrono.JapaneseEra extends Era,TemporalAccessor,TemporalAdjuster,Object {
+ JapaneseEra HEISEI
+ JapaneseEra MEIJI
+ JapaneseEra SHOWA
+ JapaneseEra TAISHO
+ int getValue()
+ JapaneseEra of(int)
+ JapaneseEra valueOf(String)
+ JapaneseEra[] values()
+}
+
+class MinguoChronology -> java.time.chrono.MinguoChronology extends AbstractChronology,Comparable,Chronology,Object {
+ MinguoChronology INSTANCE
+ MinguoDate date(TemporalAccessor)
+ MinguoDate date(int,int,int)
+ MinguoDate date(Era,int,int,int)
+ MinguoDate dateEpochDay(long)
+ MinguoDate dateNow()
+ MinguoDate dateNow(ZoneId)
+ MinguoDate dateYearDay(int,int)
+ MinguoDate dateYearDay(Era,int,int)
+ MinguoEra eraOf(int)
+}
+
+class MinguoDate -> java.time.chrono.MinguoDate extends Comparable,ChronoLocalDate,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ MinguoDate now()
+ MinguoDate now(ZoneId)
+ MinguoDate of(int,int,int)
+ MinguoDate from(TemporalAccessor)
+ MinguoChronology getChronology()
+ MinguoEra getEra()
+ MinguoDate with(TemporalField,long)
+ MinguoDate with(TemporalAdjuster)
+ MinguoDate plus(TemporalAmount)
+ MinguoDate minus(TemporalAmount)
+ MinguoDate plus(long,TemporalUnit)
+ MinguoDate minus(long,TemporalUnit)
+}
+
+class ThaiBuddhistChronology -> java.time.chrono.ThaiBuddhistChronology extends AbstractChronology,Comparable,Chronology,Object {
+ ThaiBuddhistChronology INSTANCE
+ ThaiBuddhistDate date(TemporalAccessor)
+ ThaiBuddhistDate date(int,int,int)
+ ThaiBuddhistDate date(Era,int,int,int)
+ ThaiBuddhistDate dateEpochDay(long)
+ ThaiBuddhistDate dateNow()
+ ThaiBuddhistDate dateNow(ZoneId)
+ ThaiBuddhistDate dateYearDay(int,int)
+ ThaiBuddhistDate dateYearDay(Era,int,int)
+ ThaiBuddhistEra eraOf(int)
+}
+
+class ThaiBuddhistDate -> java.time.chrono.ThaiBuddhistDate extends Comparable,ChronoLocalDate,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ ThaiBuddhistDate now()
+ ThaiBuddhistDate now(ZoneId)
+ ThaiBuddhistDate of(int,int,int)
+ ThaiBuddhistDate from(TemporalAccessor)
+ ThaiBuddhistChronology getChronology()
+ ThaiBuddhistEra getEra()
+ ThaiBuddhistDate with(TemporalField,long)
+ ThaiBuddhistDate with(TemporalAdjuster)
+ ThaiBuddhistDate plus(TemporalAmount)
+ ThaiBuddhistDate minus(TemporalAmount)
+ ThaiBuddhistDate plus(long,TemporalUnit)
+ ThaiBuddhistDate minus(long,TemporalUnit)
+}
+
+#### Enums
+
+class HijrahEra -> java.time.chrono.HijrahEra extends Enum,Comparable,Era,TemporalAccessor,TemporalAdjuster,Object {
+ HijrahEra AH
+ int getValue()
+ HijrahEra of(int)
+ HijrahEra valueOf(String)
+ HijrahEra[] values()
+}
+
+class IsoEra -> java.time.chrono.IsoEra extends Enum,Comparable,Era,TemporalAccessor,TemporalAdjuster,Object {
+ IsoEra BCE
+ IsoEra CE
+ int getValue()
+ IsoEra of(int)
+ IsoEra valueOf(String)
+ IsoEra[] values()
+}
+
+class MinguoEra -> java.time.chrono.MinguoEra extends Enum,Comparable,Era,TemporalAccessor,TemporalAdjuster,Object {
+ MinguoEra BEFORE_ROC
+ MinguoEra ROC
+ int getValue()
+ MinguoEra of(int)
+ MinguoEra valueOf(String)
+ MinguoEra[] values()
+}
+
+class ThaiBuddhistEra -> java.time.chrono.ThaiBuddhistEra extends Enum,Comparable,Era,TemporalAccessor,TemporalAdjuster,Object {
+ ThaiBuddhistEra BE
+ ThaiBuddhistEra BEFORE_BE
+ int getValue()
+ ThaiBuddhistEra of(int)
+ ThaiBuddhistEra valueOf(String)
+ ThaiBuddhistEra[] values()
+}
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.format.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.format.txt
new file mode 100644
index 00000000000..20831c4b6b4
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.format.txt
@@ -0,0 +1,177 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Painless definition file. This defines the hierarchy of classes,
+# what methods and fields they have, etc.
+#
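+
+# Note: entries that repeat the class name with no return type, such as
+# "DateTimeFormatterBuilder ()" and
+# "DateTimeParseException (String,CharSequence,int)" below, whitelist
+# constructors rather than methods.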
+
+#### Classes
+
+class DateTimeFormatter -> java.time.format.DateTimeFormatter extends Object {
+ DateTimeFormatter BASIC_ISO_DATE
+ DateTimeFormatter ISO_DATE
+ DateTimeFormatter ISO_DATE_TIME
+ DateTimeFormatter ISO_INSTANT
+ DateTimeFormatter ISO_LOCAL_DATE
+ DateTimeFormatter ISO_LOCAL_DATE_TIME
+ DateTimeFormatter ISO_LOCAL_TIME
+ DateTimeFormatter ISO_OFFSET_DATE
+ DateTimeFormatter ISO_OFFSET_DATE_TIME
+ DateTimeFormatter ISO_OFFSET_TIME
+ DateTimeFormatter ISO_ORDINAL_DATE
+ DateTimeFormatter ISO_TIME
+ DateTimeFormatter ISO_WEEK_DATE
+ DateTimeFormatter ISO_ZONED_DATE_TIME
+ DateTimeFormatter RFC_1123_DATE_TIME
+ String format(TemporalAccessor)
+ void formatTo(TemporalAccessor,Appendable)
+ Chronology getChronology()
+ DecimalStyle getDecimalStyle()
+ Locale getLocale()
+ Set getResolverFields()
+ ResolverStyle getResolverStyle()
+ ZoneId getZone()
+ DateTimeFormatter ofLocalizedDate(FormatStyle)
+ DateTimeFormatter ofLocalizedDateTime(FormatStyle)
+ DateTimeFormatter ofLocalizedDateTime(FormatStyle,FormatStyle)
+ DateTimeFormatter ofLocalizedTime(FormatStyle)
+ DateTimeFormatter ofPattern(String)
+ DateTimeFormatter ofPattern(String,Locale)
+ TemporalAccessor parse(CharSequence)
+ def parse(CharSequence,TemporalQuery)
+ TemporalAccessor parseBest(CharSequence,TemporalQuery[])
+ TemporalQuery parsedExcessDays()
+ TemporalQuery parsedLeapSecond()
+ TemporalAccessor parseUnresolved(CharSequence,ParsePosition)
+ Format toFormat()
+ Format toFormat(TemporalQuery)
+ DateTimeFormatter withChronology(Chronology)
+ DateTimeFormatter withDecimalStyle(DecimalStyle)
+ DateTimeFormatter withLocale(Locale)
+ DateTimeFormatter withResolverFields(Set)
+ DateTimeFormatter withResolverStyle(ResolverStyle)
+ DateTimeFormatter withZone(ZoneId)
+}
+
+class DateTimeFormatterBuilder -> java.time.format.DateTimeFormatterBuilder extends Object {
+ DateTimeFormatterBuilder ()
+ DateTimeFormatterBuilder append(DateTimeFormatter)
+ DateTimeFormatterBuilder appendChronologyId()
+ DateTimeFormatterBuilder appendChronologyText(TextStyle)
+ DateTimeFormatterBuilder appendFraction(TemporalField,int,int,boolean)
+ DateTimeFormatterBuilder appendInstant()
+ DateTimeFormatterBuilder appendInstant(int)
+ DateTimeFormatterBuilder appendLiteral(String)
+ DateTimeFormatterBuilder appendLocalized(FormatStyle,FormatStyle)
+ DateTimeFormatterBuilder appendLocalizedOffset(TextStyle)
+ DateTimeFormatterBuilder appendOffset(String,String)
+ DateTimeFormatterBuilder appendOffsetId()
+ DateTimeFormatterBuilder appendOptional(DateTimeFormatter)
+ DateTimeFormatterBuilder appendPattern(String)
+ DateTimeFormatterBuilder appendText(TemporalField)
+ DateTimeFormatterBuilder appendText(TemporalField,TextStyle)
+ DateTimeFormatterBuilder appendValue(TemporalField)
+ DateTimeFormatterBuilder appendValue(TemporalField,int)
+ DateTimeFormatterBuilder appendValue(TemporalField,int,int,SignStyle)
+ DateTimeFormatterBuilder appendValueReduced(TemporalField,int,int,int)
+ DateTimeFormatterBuilder appendZoneId()
+ DateTimeFormatterBuilder appendZoneOrOffsetId()
+ DateTimeFormatterBuilder appendZoneRegionId()
+ DateTimeFormatterBuilder appendZoneText(TextStyle)
+ DateTimeFormatterBuilder appendZoneText(TextStyle,Set)
+ String getLocalizedDateTimePattern(FormatStyle,FormatStyle,Chronology,Locale)
+ DateTimeFormatterBuilder optionalEnd()
+ DateTimeFormatterBuilder optionalStart()
+ DateTimeFormatterBuilder padNext(int)
+ DateTimeFormatterBuilder padNext(int,char)
+ DateTimeFormatterBuilder parseCaseInsensitive()
+ DateTimeFormatterBuilder parseCaseSensitive()
+ DateTimeFormatterBuilder parseDefaulting(TemporalField,long)
+ DateTimeFormatterBuilder parseLenient()
+ DateTimeFormatterBuilder parseStrict()
+ DateTimeFormatter toFormatter()
+ DateTimeFormatter toFormatter(Locale)
+}
+
+class DecimalStyle -> java.time.format.DecimalStyle extends Object {
+ DecimalStyle STANDARD
+ Set getAvailableLocales()
+ char getDecimalSeparator()
+ char getNegativeSign()
+ char getPositiveSign()
+ char getZeroDigit()
+ DecimalStyle of(Locale)
+ DecimalStyle ofDefaultLocale()
+ DecimalStyle withDecimalSeparator(char)
+ DecimalStyle withNegativeSign(char)
+ DecimalStyle withPositiveSign(char)
+ DecimalStyle withZeroDigit(char)
+}
+
+#### Enums
+
+class FormatStyle -> java.time.format.FormatStyle extends Enum,Comparable,Object {
+ FormatStyle FULL
+ FormatStyle LONG
+ FormatStyle MEDIUM
+ FormatStyle SHORT
+ FormatStyle valueOf(String)
+ FormatStyle[] values()
+}
+
+class ResolverStyle -> java.time.format.ResolverStyle extends Enum,Comparable,Object {
+ ResolverStyle LENIENT
+ ResolverStyle SMART
+ ResolverStyle STRICT
+ ResolverStyle valueOf(String)
+ ResolverStyle[] values()
+}
+
+class SignStyle -> java.time.format.SignStyle extends Enum,Comparable,Object {
+ SignStyle ALWAYS
+ SignStyle EXCEEDS_PAD
+ SignStyle NEVER
+ SignStyle NORMAL
+ SignStyle NOT_NEGATIVE
+ SignStyle valueOf(String)
+ SignStyle[] values()
+}
+
+class TextStyle -> java.time.format.TextStyle extends Enum,Comparable,Object {
+ TextStyle FULL
+ TextStyle FULL_STANDALONE
+ TextStyle NARROW
+ TextStyle NARROW_STANDALONE
+ TextStyle SHORT
+ TextStyle SHORT_STANDALONE
+ TextStyle asNormal()
+ TextStyle asStandalone()
+ boolean isStandalone()
+ TextStyle valueOf(String)
+ TextStyle[] values()
+}
+
+#### Exceptions
+
+class DateTimeParseException -> java.time.format.DateTimeParseException extends DateTimeException,RuntimeException,Exception,Object {
+ DateTimeParseException (String,CharSequence,int)
+ int getErrorIndex()
+ String getParsedString()
+}
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.temporal.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.temporal.txt
new file mode 100644
index 00000000000..9094dab6ba1
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.temporal.txt
@@ -0,0 +1,225 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Painless definition file. This defines the hierarchy of classes,
+# what methods and fields they have, etc.
+#
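+
+# Note: a "def" return type (for example "def query(TemporalQuery)"
+# below) is the Painless dynamic type, so the value's actual type is
+# resolved at runtime rather than declared here.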
+
+#### Interfaces
+
+class Temporal -> java.time.temporal.Temporal extends TemporalAccessor {
+ Temporal minus(long,TemporalUnit)
+ Temporal minus(TemporalAmount)
+ Temporal plus(long,TemporalUnit)
+ Temporal plus(TemporalAmount)
+ long until(Temporal,TemporalUnit)
+ Temporal with(TemporalAdjuster)
+ Temporal with(TemporalField,long)
+}
+
+class TemporalAccessor -> java.time.temporal.TemporalAccessor {
+ int get(TemporalField)
+ long getLong(TemporalField)
+ boolean isSupported(TemporalField)
+ def query(TemporalQuery)
+ ValueRange range(TemporalField)
+}
+
+class TemporalAdjuster -> java.time.temporal.TemporalAdjuster {
+ Temporal adjustInto(Temporal)
+}
+
+class TemporalAmount -> java.time.temporal.TemporalAmount {
+ Temporal addTo(Temporal)
+ long get(TemporalUnit)
+ List getUnits()
+ Temporal subtractFrom(Temporal)
+}
+
+class TemporalField -> java.time.temporal.TemporalField {
+ Temporal adjustInto(Temporal,long)
+ TemporalUnit getBaseUnit()
+ String getDisplayName(Locale)
+ long getFrom(TemporalAccessor)
+ TemporalUnit getRangeUnit()
+ boolean isDateBased()
+ boolean isSupportedBy(TemporalAccessor)
+ boolean isTimeBased()
+ ValueRange range()
+ ValueRange rangeRefinedBy(TemporalAccessor)
+ TemporalAccessor resolve(Map,TemporalAccessor,ResolverStyle)
+ String toString()
+}
+
+class TemporalQuery -> java.time.temporal.TemporalQuery {
+ def queryFrom(TemporalAccessor)
+}
+
+class TemporalUnit -> java.time.temporal.TemporalUnit {
+ Temporal addTo(Temporal,long)
+ long between(Temporal,Temporal)
+ Duration getDuration()
+ boolean isDateBased()
+ boolean isDurationEstimated()
+ boolean isSupportedBy(Temporal)
+ boolean isTimeBased()
+ String toString()
+}
+
+#### Classes
+
+class IsoFields -> java.time.temporal.IsoFields extends Object {
+ TemporalField DAY_OF_QUARTER
+ TemporalField QUARTER_OF_YEAR
+ TemporalUnit QUARTER_YEARS
+ TemporalField WEEK_BASED_YEAR
+ TemporalUnit WEEK_BASED_YEARS
+ TemporalField WEEK_OF_WEEK_BASED_YEAR
+}
+
+class JulianFields -> java.time.temporal.JulianFields extends Object {
+ TemporalField JULIAN_DAY
+ TemporalField MODIFIED_JULIAN_DAY
+ TemporalField RATA_DIE
+}
+
+class TemporalAdjusters -> java.time.temporal.TemporalAdjusters extends Object {
+ TemporalAdjuster dayOfWeekInMonth(int,DayOfWeek)
+ TemporalAdjuster firstDayOfMonth()
+ TemporalAdjuster firstDayOfNextMonth()
+ TemporalAdjuster firstDayOfNextYear()
+ TemporalAdjuster firstDayOfYear()
+ TemporalAdjuster firstInMonth(DayOfWeek)
+ TemporalAdjuster lastDayOfMonth()
+ TemporalAdjuster lastDayOfYear()
+ TemporalAdjuster lastInMonth(DayOfWeek)
+ TemporalAdjuster next(DayOfWeek)
+ TemporalAdjuster nextOrSame(DayOfWeek)
+ TemporalAdjuster ofDateAdjuster(UnaryOperator)
+ TemporalAdjuster previous(DayOfWeek)
+ TemporalAdjuster previousOrSame(DayOfWeek)
+}
+
+class TemporalQueries -> java.time.temporal.TemporalQueries extends Object {
+ TemporalQuery chronology()
+ TemporalQuery localDate()
+ TemporalQuery localTime()
+ TemporalQuery offset()
+ TemporalQuery precision()
+ TemporalQuery zone()
+ TemporalQuery zoneId()
+}
+
+class ValueRange -> java.time.temporal.ValueRange extends Object {
+ int checkValidIntValue(long,TemporalField)
+ long checkValidValue(long,TemporalField)
+ long getLargestMinimum()
+ long getMaximum()
+ long getMinimum()
+ long getSmallestMaximum()
+ boolean isFixed()
+ boolean isIntValue()
+ boolean isValidIntValue(long)
+ boolean isValidValue(long)
+ ValueRange of(long,long)
+ ValueRange of(long,long,long)
+ ValueRange of(long,long,long,long)
+}
+
+class WeekFields -> java.time.temporal.WeekFields extends Object {
+ WeekFields ISO
+ WeekFields SUNDAY_START
+ TemporalUnit WEEK_BASED_YEARS
+ TemporalField dayOfWeek()
+ DayOfWeek getFirstDayOfWeek()
+ int getMinimalDaysInFirstWeek()
+ WeekFields of(DayOfWeek,int)
+ WeekFields of(Locale)
+ TemporalField weekBasedYear()
+ TemporalField weekOfMonth()
+ TemporalField weekOfWeekBasedYear()
+ TemporalField weekOfYear()
+}
+
+#### Enums
+
+class ChronoField -> java.time.temporal.ChronoField extends Enum,Comparable,TemporalField,Object {
+ ChronoField ALIGNED_DAY_OF_WEEK_IN_MONTH
+ ChronoField ALIGNED_DAY_OF_WEEK_IN_YEAR
+ ChronoField ALIGNED_WEEK_OF_MONTH
+ ChronoField ALIGNED_WEEK_OF_YEAR
+ ChronoField AMPM_OF_DAY
+ ChronoField CLOCK_HOUR_OF_AMPM
+ ChronoField CLOCK_HOUR_OF_DAY
+ ChronoField DAY_OF_MONTH
+ ChronoField DAY_OF_WEEK
+ ChronoField DAY_OF_YEAR
+ ChronoField EPOCH_DAY
+ ChronoField ERA
+ ChronoField HOUR_OF_AMPM
+ ChronoField HOUR_OF_DAY
+ ChronoField INSTANT_SECONDS
+ ChronoField MICRO_OF_DAY
+ ChronoField MICRO_OF_SECOND
+ ChronoField MILLI_OF_DAY
+ ChronoField MILLI_OF_SECOND
+ ChronoField MINUTE_OF_DAY
+ ChronoField MINUTE_OF_HOUR
+ ChronoField MONTH_OF_YEAR
+ ChronoField NANO_OF_DAY
+ ChronoField NANO_OF_SECOND
+ ChronoField OFFSET_SECONDS
+ ChronoField PROLEPTIC_MONTH
+ ChronoField SECOND_OF_DAY
+ ChronoField SECOND_OF_MINUTE
+ ChronoField YEAR
+ ChronoField YEAR_OF_ERA
+ int checkValidIntValue(long)
+ long checkValidValue(long)
+ ChronoField valueOf(String)
+ ChronoField[] values()
+}
+
+class ChronoUnit -> java.time.temporal.ChronoUnit extends Enum,Comparable,TemporalUnit,Object {
+ ChronoUnit CENTURIES
+ ChronoUnit DAYS
+ ChronoUnit DECADES
+ ChronoUnit ERAS
+ ChronoUnit FOREVER
+ ChronoUnit HALF_DAYS
+ ChronoUnit HOURS
+ ChronoUnit MICROS
+ ChronoUnit MILLENNIA
+ ChronoUnit MILLIS
+ ChronoUnit MINUTES
+ ChronoUnit MONTHS
+ ChronoUnit NANOS
+ ChronoUnit SECONDS
+ ChronoUnit WEEKS
+ ChronoUnit YEARS
+ ChronoUnit valueOf(String)
+ ChronoUnit[] values()
+}
+
+#### Exceptions
+
+class UnsupportedTemporalTypeException -> java.time.temporal.UnsupportedTemporalTypeException extends DateTimeException,RuntimeException,Exception,Object {
+ UnsupportedTemporalTypeException (String)
+}
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.txt
new file mode 100644
index 00000000000..1b7d19a6c9c
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.txt
@@ -0,0 +1,643 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Painless definition file. This defines the hierarchy of classes,
+# what methods and fields they have, etc.
+#
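+
+# Note: lines without parentheses (for example "Duration ZERO" and
+# "Instant EPOCH" below) whitelist fields/constants; lines with
+# parentheses whitelist methods.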
+
+#### Classes
+
+class Clock -> java.time.Clock extends Object {
+ Clock fixed(Instant,ZoneId)
+ ZoneId getZone()
+ Instant instant()
+ long millis()
+ Clock offset(Clock,Duration)
+ Clock system(ZoneId)
+ Clock systemDefaultZone()
+ Clock systemUTC()
+ Clock tick(Clock,Duration)
+ Clock tickMinutes(ZoneId)
+ Clock tickSeconds(ZoneId)
+ Clock withZone(ZoneId)
+}
+
+class Duration -> java.time.Duration extends Comparable,TemporalAmount,Object {
+ Duration ZERO
+ Duration abs()
+ Duration between(Temporal,Temporal)
+ Duration dividedBy(long)
+ Duration from(TemporalAmount)
+ int getNano()
+ long getSeconds()
+ boolean isNegative()
+ boolean isZero()
+ Duration minus(Duration)
+ Duration minus(long,TemporalUnit)
+ Duration minusDays(long)
+ Duration minusHours(long)
+ Duration minusMinutes(long)
+ Duration minusSeconds(long)
+ Duration minusMillis(long)
+ Duration minusNanos(long)
+ Duration multipliedBy(long)
+ Duration negated()
+ Duration of(long,TemporalUnit)
+ Duration ofDays(long)
+ Duration ofHours(long)
+ Duration ofMillis(long)
+ Duration ofMinutes(long)
+ Duration ofNanos(long)
+ Duration ofSeconds(long)
+ Duration ofSeconds(long,long)
+ Duration parse(CharSequence)
+ Duration plus(Duration)
+ Duration plus(long,TemporalUnit)
+ Duration plusDays(long)
+ Duration plusHours(long)
+ Duration plusMinutes(long)
+ Duration plusSeconds(long)
+ Duration plusMillis(long)
+ Duration plusNanos(long)
+ long toDays()
+ long toHours()
+ long toMinutes()
+ long toMillis()
+ long toNanos()
+ Duration withSeconds(long)
+ Duration withNanos(int)
+}
+
+class Instant -> java.time.Instant extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ Instant EPOCH
+ Instant MAX
+ Instant MIN
+ OffsetDateTime atOffset(ZoneOffset)
+ ZonedDateTime atZone(ZoneId)
+ Instant from(TemporalAccessor)
+ long getEpochSecond()
+ int getNano()
+ boolean isAfter(Instant)
+ boolean isBefore(Instant)
+ Instant minus(TemporalAmount)
+ Instant minus(long,TemporalUnit)
+ Instant minusMillis(long)
+ Instant minusNanos(long)
+ Instant minusSeconds(long)
+ Instant now()
+ Instant now(Clock)
+ Instant ofEpochSecond(long)
+ Instant ofEpochSecond(long,long)
+ Instant ofEpochMilli(long)
+ Instant parse(CharSequence)
+ Instant plus(TemporalAmount)
+ Instant plus(long,TemporalUnit)
+ Instant plusMillis(long)
+ Instant plusNanos(long)
+ Instant plusSeconds(long)
+ long toEpochMilli()
+ Instant truncatedTo(TemporalUnit)
+ Instant with(TemporalAdjuster)
+ Instant with(TemporalField,long)
+}
+
+class LocalDate -> java.time.LocalDate extends Comparable,ChronoLocalDate,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ LocalDate MAX
+ LocalDate MIN
+ LocalDateTime atStartOfDay()
+ ZonedDateTime atStartOfDay(ZoneId)
+ LocalDateTime atTime(LocalTime)
+ LocalDateTime atTime(int,int)
+ LocalDateTime atTime(int,int,int)
+ LocalDateTime atTime(int,int,int,int)
+ LocalDate from(TemporalAccessor)
+ IsoChronology getChronology()
+ int getDayOfMonth()
+ DayOfWeek getDayOfWeek()
+ int getDayOfYear()
+ Month getMonth()
+ int getMonthValue()
+ int getYear()
+ LocalDate minus(TemporalAmount)
+ LocalDate minus(long,TemporalUnit)
+ LocalDate minusYears(long)
+ LocalDate minusMonths(long)
+ LocalDate minusWeeks(long)
+ LocalDate minusDays(long)
+ LocalDate now()
+ LocalDate now(ZoneId)
+ LocalDate of(int,int,int)
+ LocalDate ofYearDay(int,int)
+ LocalDate ofEpochDay(long)
+ LocalDate parse(CharSequence)
+ LocalDate parse(CharSequence,DateTimeFormatter)
+ LocalDate plus(TemporalAmount)
+ LocalDate plus(long,TemporalUnit)
+ LocalDate plusYears(long)
+ LocalDate plusMonths(long)
+ LocalDate plusWeeks(long)
+ LocalDate plusDays(long)
+ LocalDate with(TemporalAdjuster)
+ LocalDate with(TemporalField,long)
+ LocalDate withDayOfMonth(int)
+ LocalDate withDayOfYear(int)
+ LocalDate withMonth(int)
+ LocalDate withYear(int)
+}
+
+class LocalDateTime -> java.time.LocalDateTime extends Comparable,ChronoLocalDateTime,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ LocalDateTime MIN
+ LocalDateTime MAX
+ OffsetDateTime atOffset(ZoneOffset)
+ ZonedDateTime atZone(ZoneId)
+ LocalDateTime from(TemporalAccessor)
+ int getDayOfMonth()
+ DayOfWeek getDayOfWeek()
+ int getDayOfYear()
+ int getHour()
+ int getMinute()
+ Month getMonth()
+ int getMonthValue()
+ int getNano()
+ int getSecond()
+ int getYear()
+ LocalDateTime minus(TemporalAmount)
+ LocalDateTime minus(long,TemporalUnit)
+ LocalDateTime minusDays(long)
+ LocalDateTime minusHours(long)
+ LocalDateTime minusMinutes(long)
+ LocalDateTime minusMonths(long)
+ LocalDateTime minusNanos(long)
+ LocalDateTime minusSeconds(long)
+ LocalDateTime minusWeeks(long)
+ LocalDateTime minusYears(long)
+ LocalDateTime now()
+ LocalDateTime now(ZoneId)
+ LocalDateTime of(LocalDate,LocalTime)
+ LocalDateTime of(int,int,int,int,int)
+ LocalDateTime of(int,int,int,int,int,int)
+ LocalDateTime of(int,int,int,int,int,int,int)
+ LocalDateTime ofInstant(Instant,ZoneId)
+ LocalDateTime ofEpochSecond(long,int,ZoneOffset)
+ LocalDateTime parse(CharSequence)
+ LocalDateTime parse(CharSequence,DateTimeFormatter)
+ LocalDateTime plus(TemporalAmount)
+ LocalDateTime plus(long,TemporalUnit)
+ LocalDateTime plusDays(long)
+ LocalDateTime plusHours(long)
+ LocalDateTime plusMinutes(long)
+ LocalDateTime plusMonths(long)
+ LocalDateTime plusNanos(long)
+ LocalDateTime plusSeconds(long)
+ LocalDateTime plusWeeks(long)
+ LocalDateTime plusYears(long)
+ LocalDateTime truncatedTo(TemporalUnit)
+ LocalDateTime with(TemporalAdjuster)
+ LocalDateTime with(TemporalField,long)
+ LocalDateTime withDayOfMonth(int)
+ LocalDateTime withDayOfYear(int)
+ LocalDateTime withHour(int)
+ LocalDateTime withMinute(int)
+ LocalDateTime withMonth(int)
+ LocalDateTime withSecond(int)
+ LocalDateTime withYear(int)
+}
+
+class LocalTime -> java.time.LocalTime extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ LocalTime MAX
+ LocalTime MIDNIGHT
+ LocalTime MIN
+ LocalTime NOON
+ LocalDateTime atDate(LocalDate)
+ OffsetTime atOffset(ZoneOffset)
+ String format(DateTimeFormatter)
+ LocalTime from(TemporalAccessor)
+ int getHour()
+ int getMinute()
+ int getNano()
+ int getSecond()
+ boolean isAfter(LocalTime)
+ boolean isBefore(LocalTime)
+ LocalTime minus(TemporalAmount)
+ LocalTime minus(long,TemporalUnit)
+ LocalTime minusHours(long)
+ LocalTime minusMinutes(long)
+ LocalTime minusNanos(long)
+ LocalTime minusSeconds(long)
+ LocalTime now()
+ LocalTime now(ZoneId)
+ LocalTime of(int,int)
+ LocalTime of(int,int,int)
+ LocalTime of(int,int,int,int)
+ LocalTime ofNanoOfDay(long)
+ LocalTime ofSecondOfDay(long)
+ LocalTime parse(CharSequence)
+ LocalTime parse(CharSequence,DateTimeFormatter)
+ LocalTime plus(TemporalAmount)
+ LocalTime plus(long,TemporalUnit)
+ LocalTime plusHours(long)
+ LocalTime plusMinutes(long)
+ LocalTime plusNanos(long)
+ LocalTime plusSeconds(long)
+ long toNanoOfDay()
+ int toSecondOfDay()
+ LocalTime truncatedTo(TemporalUnit)
+ LocalTime with(TemporalAdjuster)
+ LocalTime with(TemporalField,long)
+ LocalTime withHour(int)
+ LocalTime withMinute(int)
+ LocalTime withNano(int)
+ LocalTime withSecond(int)
+}
+
+class MonthDay -> java.time.MonthDay extends Comparable,TemporalAccessor,TemporalAdjuster,Object {
+ LocalDate atYear(int)
+ String format(DateTimeFormatter)
+ MonthDay from(TemporalAccessor)
+ int getMonthValue()
+ Month getMonth()
+ int getDayOfMonth()
+ boolean isAfter(MonthDay)
+ boolean isBefore(MonthDay)
+ boolean isValidYear(int)
+ MonthDay now()
+ MonthDay now(ZoneId)
+ MonthDay of(int,int)
+ MonthDay parse(CharSequence)
+ MonthDay parse(CharSequence,DateTimeFormatter)
+ MonthDay with(Month)
+ MonthDay withDayOfMonth(int)
+ MonthDay withMonth(int)
+}
+
+class OffsetDateTime -> java.time.OffsetDateTime extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ OffsetDateTime MAX
+ OffsetDateTime MIN
+ ZonedDateTime atZoneSameInstant(ZoneId)
+ ZonedDateTime atZoneSimilarLocal(ZoneId)
+ String format(DateTimeFormatter)
+ OffsetDateTime from(TemporalAccessor)
+ int getDayOfMonth()
+ DayOfWeek getDayOfWeek()
+ int getDayOfYear()
+ int getHour()
+ int getMinute()
+ Month getMonth()
+ int getMonthValue()
+ int getNano()
+ ZoneOffset getOffset()
+ int getSecond()
+ int getYear()
+ boolean isAfter(OffsetDateTime)
+ boolean isBefore(OffsetDateTime)
+ boolean isEqual(OffsetDateTime)
+ OffsetDateTime minus(TemporalAmount)
+ OffsetDateTime minus(long,TemporalUnit)
+ OffsetDateTime minusYears(long)
+ OffsetDateTime minusMonths(long)
+ OffsetDateTime minusWeeks(long)
+ OffsetDateTime minusDays(long)
+ OffsetDateTime minusHours(long)
+ OffsetDateTime minusMinutes(long)
+ OffsetDateTime minusSeconds(long)
+ OffsetDateTime minusNanos(long)
+ OffsetDateTime now()
+ OffsetDateTime now(ZoneId)
+ OffsetDateTime of(LocalDate,LocalTime,ZoneOffset)
+ OffsetDateTime of(LocalDateTime,ZoneOffset)
+ OffsetDateTime of(int,int,int,int,int,int,int,ZoneOffset)
+ OffsetDateTime ofInstant(Instant,ZoneId)
+ OffsetDateTime parse(CharSequence)
+ OffsetDateTime parse(CharSequence,DateTimeFormatter)
+ OffsetDateTime plus(TemporalAmount)
+ OffsetDateTime plus(long,TemporalUnit)
+ OffsetDateTime plusYears(long)
+ OffsetDateTime plusMonths(long)
+ OffsetDateTime plusWeeks(long)
+ OffsetDateTime plusDays(long)
+ OffsetDateTime plusHours(long)
+ OffsetDateTime plusMinutes(long)
+ OffsetDateTime plusSeconds(long)
+ OffsetDateTime plusNanos(long)
+ Comparator timeLineOrder()
+ long toEpochSecond()
+ Instant toInstant()
+ LocalDate toLocalDate()
+ LocalDateTime toLocalDateTime()
+ LocalTime toLocalTime()
+ OffsetTime toOffsetTime()
+ ZonedDateTime toZonedDateTime()
+ OffsetDateTime truncatedTo(TemporalUnit)
+ OffsetDateTime with(TemporalAdjuster)
+ OffsetDateTime with(TemporalField,long)
+ OffsetDateTime withDayOfMonth(int)
+ OffsetDateTime withDayOfYear(int)
+ OffsetDateTime withHour(int)
+ OffsetDateTime withMinute(int)
+ OffsetDateTime withMonth(int)
+ OffsetDateTime withNano(int)
+ OffsetDateTime withSecond(int)
+ OffsetDateTime withYear(int)
+ OffsetDateTime withOffsetSameLocal(ZoneOffset)
+ OffsetDateTime withOffsetSameInstant(ZoneOffset)
+}
+
+class OffsetTime -> java.time.OffsetTime extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ OffsetTime MAX
+ OffsetTime MIN
+ String format(DateTimeFormatter)
+ OffsetTime from(TemporalAccessor)
+ int getHour()
+ int getMinute()
+ ZoneOffset getOffset()
+ int getSecond()
+ int getNano()
+ boolean isAfter(OffsetTime)
+ boolean isBefore(OffsetTime)
+ boolean isEqual(OffsetTime)
+ OffsetTime now()
+ OffsetTime now(ZoneId)
+ OffsetTime of(LocalTime,ZoneOffset)
+ OffsetTime of(int,int,int,int,ZoneOffset)
+ OffsetTime ofInstant(Instant,ZoneId)
+ OffsetTime plus(TemporalAmount)
+ OffsetTime plus(long,TemporalUnit)
+ OffsetTime plusHours(long)
+ OffsetTime plusMinutes(long)
+ OffsetTime plusSeconds(long)
+ OffsetTime plusNanos(long)
+ OffsetTime minus(TemporalAmount)
+ OffsetTime minus(long,TemporalUnit)
+ OffsetTime minusHours(long)
+ OffsetTime minusMinutes(long)
+ OffsetTime minusSeconds(long)
+ OffsetTime minusNanos(long)
+ OffsetTime parse(CharSequence)
+ OffsetTime parse(CharSequence,DateTimeFormatter)
+ LocalTime toLocalTime()
+ OffsetTime truncatedTo(TemporalUnit)
+ OffsetTime with(TemporalAdjuster)
+ OffsetTime with(TemporalField,long)
+ OffsetTime withHour(int)
+ OffsetTime withMinute(int)
+ OffsetTime withNano(int)
+ OffsetTime withOffsetSameLocal(ZoneOffset)
+ OffsetTime withOffsetSameInstant(ZoneOffset)
+ OffsetTime withSecond(int)
+}
+
+class Period -> java.time.Period extends ChronoPeriod,TemporalAmount,Object {
+ Period ZERO
+ Period between(LocalDate,LocalDate)
+ Period from(TemporalAmount)
+ IsoChronology getChronology()
+ int getDays()
+ int getMonths()
+ int getYears()
+ Period of(int,int,int)
+ Period ofYears(int)
+ Period ofMonths(int)
+ Period ofWeeks(int)
+ Period ofDays(int)
+ Period parse(CharSequence)
+ Period plus(TemporalAmount)
+ Period plusYears(long)
+ Period plusMonths(long)
+ Period plusDays(long)
+ Period minus(TemporalAmount)
+ Period minusYears(long)
+ Period minusMonths(long)
+ Period minusDays(long)
+ Period multipliedBy(int)
+ Period negated()
+ Period normalized()
+ long toTotalMonths()
+ Period withDays(int)
+ Period withMonths(int)
+ Period withYears(int)
+}
+
+class Year -> java.time.Year extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ int MAX_VALUE
+ int MIN_VALUE
+ LocalDate atDay(int)
+ YearMonth atMonth(int)
+ LocalDate atMonthDay(MonthDay)
+ String format(DateTimeFormatter)
+ Year from(TemporalAccessor)
+ int getValue()
+ boolean isAfter(Year)
+ boolean isLeap()
+ boolean isLeap(long)
+ boolean isValidMonthDay(MonthDay)
+ int length()
+ Year minus(TemporalAmount)
+ Year minus(long,TemporalUnit)
+ Year minusYears(long)
+ Year now()
+ Year now(ZoneId)
+ Year of(int)
+ Year parse(CharSequence)
+ Year parse(CharSequence,DateTimeFormatter)
+ Year plus(TemporalAmount)
+ Year plus(long,TemporalUnit)
+ Year plusYears(long)
+ Year with(TemporalAdjuster)
+ Year with(TemporalField,long)
+}
+
+class YearMonth -> java.time.YearMonth extends Comparable,Temporal,TemporalAccessor,TemporalAdjuster,Object {
+ LocalDate atDay(int)
+ LocalDate atEndOfMonth()
+ String format(DateTimeFormatter)
+ YearMonth from(TemporalAccessor)
+ Month getMonth()
+ int getMonthValue()
+ int getYear()
+ boolean isAfter(YearMonth)
+ boolean isBefore(YearMonth)
+ boolean isLeapYear()
+ boolean isValidDay(int)
+ int lengthOfMonth()
+ int lengthOfYear()
+ YearMonth minus(TemporalAmount)
+ YearMonth minus(long,TemporalUnit)
+ YearMonth minusYears(long)
+ YearMonth minusMonths(long)
+ YearMonth now()
+ YearMonth now(ZoneId)
+ YearMonth of(int,int)
+ YearMonth parse(CharSequence)
+ YearMonth parse(CharSequence,DateTimeFormatter)
+ YearMonth plus(TemporalAmount)
+ YearMonth plus(long,TemporalUnit)
+ YearMonth plusYears(long)
+ YearMonth plusMonths(long)
+ YearMonth with(TemporalAdjuster)
+ YearMonth with(TemporalField,long)
+ YearMonth withYear(int)
+ YearMonth withMonth(int)
+}
+
+class ZonedDateTime -> java.time.ZonedDateTime extends Comparable,ChronoZonedDateTime,Temporal,TemporalAccessor,Object {
+ int getDayOfMonth()
+ DayOfWeek getDayOfWeek()
+ int getDayOfYear()
+ int getHour()
+ LocalDate toLocalDate()
+ LocalDateTime toLocalDateTime()
+ int getMinute()
+ Month getMonth()
+ int getMonthValue()
+ int getNano()
+ int getSecond()
+ int getYear()
+ ZonedDateTime from(TemporalAccessor)
+ ZonedDateTime minus(TemporalAmount)
+ ZonedDateTime minus(long,TemporalUnit)
+ ZonedDateTime minusYears(long)
+ ZonedDateTime minusMonths(long)
+ ZonedDateTime minusWeeks(long)
+ ZonedDateTime minusDays(long)
+ ZonedDateTime minusHours(long)
+ ZonedDateTime minusMinutes(long)
+ ZonedDateTime minusSeconds(long)
+ ZonedDateTime minusNanos(long)
+ ZonedDateTime now()
+ ZonedDateTime now(ZoneId)
+ ZonedDateTime of(LocalDate,LocalTime,ZoneId)
+ ZonedDateTime of(LocalDateTime,ZoneId)
+ ZonedDateTime of(int,int,int,int,int,int,int,ZoneId)
+ ZonedDateTime ofInstant(Instant,ZoneId)
+ ZonedDateTime ofInstant(LocalDateTime,ZoneOffset,ZoneId)
+ ZonedDateTime ofLocal(LocalDateTime,ZoneId,ZoneOffset)
+ ZonedDateTime ofStrict(LocalDateTime,ZoneOffset,ZoneId)
+ ZonedDateTime parse(CharSequence)
+ ZonedDateTime parse(CharSequence,DateTimeFormatter)
+ ZonedDateTime plus(TemporalAmount)
+ ZonedDateTime plus(long,TemporalUnit)
+ ZonedDateTime plusDays(long)
+ ZonedDateTime plusHours(long)
+ ZonedDateTime plusMinutes(long)
+ ZonedDateTime plusMonths(long)
+ ZonedDateTime plusNanos(long)
+ ZonedDateTime plusSeconds(long)
+ ZonedDateTime plusWeeks(long)
+ ZonedDateTime plusYears(long)
+ OffsetDateTime toOffsetDateTime()
+ ZonedDateTime truncatedTo(TemporalUnit)
+ ZonedDateTime with(TemporalAdjuster)
+ ZonedDateTime with(TemporalField,long)
+ ZonedDateTime withDayOfMonth(int)
+ ZonedDateTime withDayOfYear(int)
+ ZonedDateTime withEarlierOffsetAtOverlap()
+ ZonedDateTime withFixedOffsetZone()
+ ZonedDateTime withHour(int)
+ ZonedDateTime withLaterOffsetAtOverlap()
+ ZonedDateTime withMinute(int)
+ ZonedDateTime withMonth(int)
+ ZonedDateTime withNano(int)
+ ZonedDateTime withSecond(int)
+ ZonedDateTime withYear(int)
+ ZonedDateTime withZoneSameLocal(ZoneId)
+ ZonedDateTime withZoneSameInstant(ZoneId)
+}
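+
+# Comment-only sketch of the ZonedDateTime entries above, assuming Instant
+# (whitelisted elsewhere in this file) exposes ofEpochMilli(long); the zone
+# ids are ordinary tz database names.
+#
+#   ZonedDateTime utc = ZonedDateTime.ofInstant(Instant.ofEpochMilli(0L), ZoneId.of("UTC"));
+#   ZonedDateTime tokyo = utc.withZoneSameInstant(ZoneId.of("Asia/Tokyo")); // 1970-01-01T09:00+09:00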
+
+class ZoneId -> java.time.ZoneId extends Object {
+ Map SHORT_IDS
+ Set getAvailableZoneIds()
+ ZoneId of(String)
+ ZoneId of(String,Map)
+ ZoneId ofOffset(String,ZoneOffset)
+ ZoneId from(TemporalAccessor)
+ String getId()
+ String getDisplayName(TextStyle,Locale)
+ ZoneId normalized()
+ ZoneId systemDefault()
+ ZoneRules getRules()
+}
+
+class ZoneOffset -> java.time.ZoneOffset extends ZoneId,Object {
+ ZoneOffset MAX
+ ZoneOffset MIN
+ ZoneOffset UTC
+ ZoneOffset from(TemporalAccessor)
+ int getTotalSeconds()
+ ZoneOffset of(String)
+ ZoneOffset ofHours(int)
+ ZoneOffset ofHoursMinutes(int,int)
+ ZoneOffset ofHoursMinutesSeconds(int,int,int)
+ ZoneOffset ofTotalSeconds(int)
+}
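+
+# Comment-only sketch tying ZoneId and ZoneOffset together; "UTC" is one of
+# the prefixes ZoneId.ofOffset(String,ZoneOffset) accepts ("", "GMT", "UTC", "UT").
+#
+#   ZoneOffset off = ZoneOffset.ofHoursMinutes(5, 30); // +05:30
+#   ZoneId fixed = ZoneId.ofOffset("UTC", off);        // UTC+05:30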
+
+#### Enums
+
+class DayOfWeek -> java.time.DayOfWeek extends Enum,Comparable,TemporalAccessor,TemporalAdjuster,Object {
+ DayOfWeek FRIDAY
+ DayOfWeek MONDAY
+ DayOfWeek SATURDAY
+ DayOfWeek SUNDAY
+ DayOfWeek THURSDAY
+ DayOfWeek TUESDAY
+ DayOfWeek WEDNESDAY
+ DayOfWeek of(int)
+ DayOfWeek from(TemporalAccessor)
+ int getValue()
+ String getDisplayName(TextStyle,Locale)
+ DayOfWeek minus(long)
+ DayOfWeek plus(long)
+ DayOfWeek valueOf(String)
+ DayOfWeek[] values()
+}
+
+class Month -> java.time.Month extends Enum,Comparable,TemporalAccessor,TemporalAdjuster,Object {
+ Month APRIL
+ Month AUGUST
+ Month DECEMBER
+ Month FEBRUARY
+ Month JANUARY
+ Month JULY
+ Month JUNE
+ Month MARCH
+ Month MAY
+ Month NOVEMBER
+ Month OCTOBER
+ Month SEPTEMBER
+ Month from(TemporalAccessor)
+ int firstDayOfYear(boolean)
+ Month firstMonthOfQuarter()
+ int getValue()
+ String getDisplayName(TextStyle,Locale)
+ int length(boolean)
+ int maxLength()
+ int minLength()
+ Month minus(long)
+ Month of(int)
+ Month plus(long)
+ Month valueOf(String)
+ Month[] values()
+}
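+
+# Comment-only sketch of the two enums above; plus(long) wraps around the
+# week, and length(boolean) takes a leap-year flag.
+#
+#   DayOfWeek d = DayOfWeek.MONDAY.plus(3); // THURSDAY
+#   int feb = Month.of(2).length(true);     // 29 in a leap year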
+
+#### Exceptions
+
+class DateTimeException -> java.time.DateTimeException extends RuntimeException,Exception,Object {
+ DateTimeException (String)
+}
+
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.zone.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.zone.txt
new file mode 100644
index 00000000000..036e2a734f9
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.time.zone.txt
@@ -0,0 +1,93 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+#
+# Painless definition file. This defines the hierarchy of classes,
+# what methods and fields they have, etc.
+#
+
+#### Classes
+
+class ZoneOffsetTransition -> java.time.zone.ZoneOffsetTransition extends Comparable,Object {
+ LocalDateTime getDateTimeAfter()
+ LocalDateTime getDateTimeBefore()
+ Duration getDuration()
+ Instant getInstant()
+ ZoneOffset getOffsetAfter()
+ ZoneOffset getOffsetBefore()
+ boolean isGap()
+ boolean isOverlap()
+ boolean isValidOffset(ZoneOffset)
+ ZoneOffsetTransition of(LocalDateTime,ZoneOffset,ZoneOffset)
+ long toEpochSecond()
+}
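+
+# Comment-only sketch of a spring-forward gap built with the factory above;
+# the date matches the 2016 CET -> CEST change, and the two offsets are the
+# before/after offsets of that transition.
+#
+#   ZoneOffsetTransition t = ZoneOffsetTransition.of(
+#       LocalDateTime.of(2016, 3, 27, 2, 0), ZoneOffset.ofHours(1), ZoneOffset.ofHours(2));
+#   boolean gap = t.isGap(); // true: local times 02:00-02:59 are skipped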
+
+class ZoneOffsetTransitionRule -> java.time.zone.ZoneOffsetTransitionRule extends Object {
+ ZoneOffsetTransition createTransition(int)
+ int getDayOfMonthIndicator()
+ DayOfWeek getDayOfWeek()
+ LocalTime getLocalTime()
+ Month getMonth()
+ ZoneOffset getOffsetAfter()
+ ZoneOffset getOffsetBefore()
+ ZoneOffset getStandardOffset()
+ ZoneOffsetTransitionRule.TimeDefinition getTimeDefinition()
+ boolean isMidnightEndOfDay()
+ ZoneOffsetTransitionRule of(Month,int,DayOfWeek,LocalTime,boolean,ZoneOffsetTransitionRule.TimeDefinition,ZoneOffset,ZoneOffset,ZoneOffset)
+}
+
+class ZoneRules -> java.time.zone.ZoneRules extends Object {
+ Duration getDaylightSavings(Instant)
+ ZoneOffset getOffset(Instant)
+ ZoneOffset getStandardOffset(Instant)
+ ZoneOffsetTransition getTransition(LocalDateTime)
+ List getTransitionRules()
+ List getTransitions()
+ List getValidOffsets(LocalDateTime)
+ boolean isDaylightSavings(Instant)
+ boolean isFixedOffset()
+ boolean isValidOffset(LocalDateTime,ZoneOffset)
+ ZoneOffsetTransition nextTransition(Instant)
+ ZoneRules of(ZoneOffset)
+ ZoneRules of(ZoneOffset,ZoneOffset,List,List,List)
+ ZoneOffsetTransition previousTransition(Instant)
+}
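+
+# Comment-only sketch of the ZoneRules entries above, assuming Instant is
+# whitelisted elsewhere; getRules() comes from the ZoneId definition in
+# java.time.txt.
+#
+#   ZoneRules rules = ZoneId.of("Europe/Paris").getRules();
+#   boolean dst = rules.isDaylightSavings(Instant.ofEpochMilli(0L)); // false: January 1970
+#   ZoneOffsetTransition next = rules.nextTransition(Instant.ofEpochMilli(0L));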
+
+class ZoneRulesProvider -> java.time.zone.ZoneRulesProvider extends Object {
+ Set getAvailableZoneIds()
+ ZoneRules getRules(String,boolean)
+ NavigableMap getVersions(String)
+}
+
+#### Enums
+
+class ZoneOffsetTransitionRule.TimeDefinition -> java.time.zone.ZoneOffsetTransitionRule$TimeDefinition extends Enum,Comparable,Object {
+ ZoneOffsetTransitionRule.TimeDefinition STANDARD
+ ZoneOffsetTransitionRule.TimeDefinition UTC
+ ZoneOffsetTransitionRule.TimeDefinition WALL
+ LocalDateTime createDateTime(LocalDateTime,ZoneOffset,ZoneOffset)
+ ZoneOffsetTransitionRule.TimeDefinition valueOf(String)
+ ZoneOffsetTransitionRule.TimeDefinition[] values()
+}
+
+#### Exceptions
+
+class ZoneRulesException -> java.time.zone.ZoneRulesException extends DateTimeException,RuntimeException,Exception,Object {
+ ZoneRulesException (String)
+}
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt
index 9be890f15d5..ffa80da79c1 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/java.util.txt
@@ -491,6 +491,7 @@ class Calendar -> java.util.Calendar extends Comparable,Object {
void setTimeInMillis(long)
void setTimeZone(TimeZone)
void setWeekDate(int,int,int)
+ Instant toInstant()
}
class Calendar.Builder -> java.util.Calendar$Builder extends Object {
@@ -583,6 +584,7 @@ class Date -> java.util.Date extends Comparable,Object {
boolean after(Date)
boolean before(Date)
def clone()
+ Date from(Instant)
long getTime()
void setTime(long)
}
@@ -976,6 +978,8 @@ class TimeZone -> java.util.TimeZone extends Object {
boolean hasSameRules(TimeZone)
boolean inDaylightTime(Date)
boolean observesDaylightTime()
+ void setRawOffset(int)
+ ZoneId toZoneId()
boolean useDaylightTime()
}
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
index 2ca46b4effc..5c766018f13 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java
@@ -23,10 +23,10 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.SuppressForbidden;
import org.elasticsearch.Version;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cli.UserError;
+import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting;
@@ -63,7 +63,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
-import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;